diff --git a/.gitignore b/.gitignore index 1264e45..7941396 100755 --- a/.gitignore +++ b/.gitignore @@ -145,3 +145,5 @@ dmypy.json cython_debug/ # End of https://www.toptal.com/developers/gitignore/api/flask + +*.db diff --git a/.pylintrc b/.pylintrc index c6d73d8..9135ea9 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,2 +1,3 @@ [MASTER] init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))" +extension-pkg-whitelist=pydantic diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..a691ce0 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,19 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Python: FastAPI", + "type": "python", + "request": "launch", + "module": "uvicorn", + "args": [ + "selfprivacy_api.app:app" + ], + "jinja": true, + "justMyCode": false + } + ] +} diff --git a/api.nix b/api.nix new file mode 100644 index 0000000..83bc695 --- /dev/null +++ b/api.nix @@ -0,0 +1,64 @@ +{ lib, python39Packages }: +with python39Packages; +buildPythonApplication { + pname = "selfprivacy-api"; + version = "2.0.0"; + + propagatedBuildInputs = [ + setuptools + portalocker + pytz + pytest + pytest-mock + pytest-datadir + huey + gevent + mnemonic + pydantic + typing-extensions + psutil + fastapi + uvicorn + (buildPythonPackage rec { + pname = "strawberry-graphql"; + version = "0.123.0"; + format = "pyproject"; + patches = [ + ./strawberry-graphql.patch + ]; + propagatedBuildInputs = [ + typing-extensions + python-multipart + python-dateutil + # flask + pydantic + pygments + poetry + # flask-cors + (buildPythonPackage rec { + pname = "graphql-core"; + version = "3.2.0"; + format = "setuptools"; + src = fetchPypi { + inherit pname version; + sha256 = 
"sha256-huKgvgCL/eGe94OI3opyWh2UKpGQykMcJKYIN5c4A84="; + }; + checkInputs = [ + pytest-asyncio + pytest-benchmark + pytestCheckHook + ]; + pythonImportsCheck = [ + "graphql" + ]; + }) + ]; + src = fetchPypi { + inherit pname version; + sha256 = "KsmZ5Xv8tUg6yBxieAEtvoKoRG60VS+iVGV0X6oCExo="; + }; + }) + ]; + + src = ./.; +} diff --git a/default.nix b/default.nix new file mode 100644 index 0000000..740c7ce --- /dev/null +++ b/default.nix @@ -0,0 +1,2 @@ +{ pkgs ? import {} }: +pkgs.callPackage ./api.nix {} diff --git a/pyproject.toml b/pyproject.toml index 1ffd18c..7f8d872 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,3 @@ [build-system] -requires = ["setuptools", "wheel", "portalocker", "flask-swagger", "flask-swagger-ui"] -build-backend = "setuptools.build_meta" \ No newline at end of file +requires = ["setuptools", "wheel", "portalocker"] +build-backend = "setuptools.build_meta" diff --git a/requirements.txt b/requirements.txt deleted file mode 100755 index 4e0e02e..0000000 --- a/requirements.txt +++ /dev/null @@ -1,17 +0,0 @@ -wheel -flask -flask_restful -flask_socketio -setuptools -portalocker -flask-swagger -flask-swagger-ui -pytz -huey -gevent -mnemonic - -pytest -coverage -pytest-mock -pytest-datadir diff --git a/selfprivacy_api/resources/__init__.py b/selfprivacy_api/actions/__init__.py similarity index 100% rename from selfprivacy_api/resources/__init__.py rename to selfprivacy_api/actions/__init__.py diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py new file mode 100644 index 0000000..61c695d --- /dev/null +++ b/selfprivacy_api/actions/api_tokens.py @@ -0,0 +1,116 @@ +"""App tokens actions""" +from datetime import datetime +from typing import Optional +from pydantic import BaseModel + + +from selfprivacy_api.utils.auth import ( + delete_token, + generate_recovery_token, + get_recovery_token_status, + get_tokens_info, + is_recovery_token_exists, + is_recovery_token_valid, + is_token_name_exists, + 
is_token_name_pair_valid, + refresh_token, + get_token_name, +) + + +class TokenInfoWithIsCaller(BaseModel): + """Token info""" + + name: str + date: datetime + is_caller: bool + + +def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCaller]: + """Get the tokens info""" + caller_name = get_token_name(caller_token) + tokens = get_tokens_info() + return [ + TokenInfoWithIsCaller( + name=token.name, + date=token.date, + is_caller=token.name == caller_name, + ) + for token in tokens + ] + + +class NotFoundException(Exception): + """Not found exception""" + + +class CannotDeleteCallerException(Exception): + """Cannot delete caller exception""" + + +def delete_api_token(caller_token: str, token_name: str) -> None: + """Delete the token""" + if is_token_name_pair_valid(token_name, caller_token): + raise CannotDeleteCallerException("Cannot delete caller's token") + if not is_token_name_exists(token_name): + raise NotFoundException("Token not found") + delete_token(token_name) + + +def refresh_api_token(caller_token: str) -> str: + """Refresh the token""" + new_token = refresh_token(caller_token) + if new_token is None: + raise NotFoundException("Token not found") + return new_token + + +class RecoveryTokenStatus(BaseModel): + """Recovery token status""" + + exists: bool + valid: bool + date: Optional[datetime] = None + expiration: Optional[datetime] = None + uses_left: Optional[int] = None + + +def get_api_recovery_token_status() -> RecoveryTokenStatus: + """Get the recovery token status""" + if not is_recovery_token_exists(): + return RecoveryTokenStatus(exists=False, valid=False) + status = get_recovery_token_status() + if status is None: + return RecoveryTokenStatus(exists=False, valid=False) + is_valid = is_recovery_token_valid() + return RecoveryTokenStatus( + exists=True, + valid=is_valid, + date=status["date"], + expiration=status["expiration"], + uses_left=status["uses_left"], + ) + + +class InvalidExpirationDate(Exception): + """Invalid 
expiration date exception""" + + +class InvalidUsesLeft(Exception): + """Invalid uses left exception""" + + +def get_new_api_recovery_key( + expiration_date: Optional[datetime] = None, uses_left: Optional[int] = None +) -> str: + """Get new recovery key""" + if expiration_date is not None: + current_time = datetime.now().timestamp() + if expiration_date.timestamp() < current_time: + raise InvalidExpirationDate("Expiration date is in the past") + if uses_left is not None: + if uses_left <= 0: + raise InvalidUsesLeft("Uses must be greater than 0") + + key = generate_recovery_token(expiration_date, uses_left) + return key diff --git a/selfprivacy_api/actions/ssh.py b/selfprivacy_api/actions/ssh.py new file mode 100644 index 0000000..3f79ff8 --- /dev/null +++ b/selfprivacy_api/actions/ssh.py @@ -0,0 +1,149 @@ +"""Actions to manage the SSH.""" +from typing import Optional +from pydantic import BaseModel +from selfprivacy_api.actions.users import ( + UserNotFound, + ensure_ssh_and_users_fields_exist, +) + +from selfprivacy_api.utils import WriteUserData, ReadUserData, validate_ssh_public_key + + +def enable_ssh(): + with WriteUserData() as data: + if "ssh" not in data: + data["ssh"] = {} + data["ssh"]["enable"] = True + + +class UserdataSshSettings(BaseModel): + """Settings for the SSH.""" + + enable: bool = True + passwordAuthentication: bool = True + rootKeys: list[str] = [] + + +def get_ssh_settings() -> UserdataSshSettings: + with ReadUserData() as data: + if "ssh" not in data: + return UserdataSshSettings() + if "enable" not in data["ssh"]: + data["ssh"]["enable"] = True + if "passwordAuthentication" not in data["ssh"]: + data["ssh"]["passwordAuthentication"] = True + if "rootKeys" not in data["ssh"]: + data["ssh"]["rootKeys"] = [] + return UserdataSshSettings(**data["ssh"]) + + +def set_ssh_settings( + enable: Optional[bool] = None, password_authentication: Optional[bool] = None +) -> None: + with WriteUserData() as data: + if "ssh" not in data: + data["ssh"] = {} 
+ if enable is not None: + data["ssh"]["enable"] = enable + if password_authentication is not None: + data["ssh"]["passwordAuthentication"] = password_authentication + + +def add_root_ssh_key(public_key: str): + with WriteUserData() as data: + if "ssh" not in data: + data["ssh"] = {} + if "rootKeys" not in data["ssh"]: + data["ssh"]["rootKeys"] = [] + # Return 409 if key already in array + for key in data["ssh"]["rootKeys"]: + if key == public_key: + raise KeyAlreadyExists() + data["ssh"]["rootKeys"].append(public_key) + + +class KeyAlreadyExists(Exception): + """Key already exists""" + + pass + + +class InvalidPublicKey(Exception): + """Invalid public key""" + + pass + + +def create_ssh_key(username: str, ssh_key: str): + """Create a new ssh key""" + + if not validate_ssh_public_key(ssh_key): + raise InvalidPublicKey() + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == data["username"]: + if ssh_key in data["sshKeys"]: + raise KeyAlreadyExists() + + data["sshKeys"].append(ssh_key) + return + + if username == "root": + if ssh_key in data["ssh"]["rootKeys"]: + raise KeyAlreadyExists() + + data["ssh"]["rootKeys"].append(ssh_key) + return + + for user in data["users"]: + if user["username"] == username: + if "sshKeys" not in user: + user["sshKeys"] = [] + if ssh_key in user["sshKeys"]: + raise KeyAlreadyExists() + + user["sshKeys"].append(ssh_key) + return + + raise UserNotFound() + + +class KeyNotFound(Exception): + """Key not found""" + + pass + + +def remove_ssh_key(username: str, ssh_key: str): + """Delete a ssh key""" + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == "root": + if ssh_key in data["ssh"]["rootKeys"]: + data["ssh"]["rootKeys"].remove(ssh_key) + return + + raise KeyNotFound() + + if username == data["username"]: + if ssh_key in data["sshKeys"]: + data["sshKeys"].remove(ssh_key) + return + + raise KeyNotFound() + + for user in data["users"]: + if user["username"] 
== username: + if "sshKeys" not in user: + user["sshKeys"] = [] + if ssh_key in user["sshKeys"]: + user["sshKeys"].remove(ssh_key) + return + + raise KeyNotFound() + + raise UserNotFound() diff --git a/selfprivacy_api/actions/system.py b/selfprivacy_api/actions/system.py new file mode 100644 index 0000000..853662f --- /dev/null +++ b/selfprivacy_api/actions/system.py @@ -0,0 +1,139 @@ +"""Actions to manage the system.""" +import os +import subprocess +import pytz +from typing import Optional +from pydantic import BaseModel + +from selfprivacy_api.utils import WriteUserData, ReadUserData + + +def get_timezone() -> str: + """Get the timezone of the server""" + with ReadUserData() as user_data: + if "timezone" in user_data: + return user_data["timezone"] + return "Europe/Uzhgorod" + + +class InvalidTimezone(Exception): + """Invalid timezone""" + + pass + + +def change_timezone(timezone: str) -> None: + """Change the timezone of the server""" + if timezone not in pytz.all_timezones: + raise InvalidTimezone(f"Invalid timezone: {timezone}") + with WriteUserData() as user_data: + user_data["timezone"] = timezone + + +class UserDataAutoUpgradeSettings(BaseModel): + """Settings for auto-upgrading user data""" + + enable: bool = True + allowReboot: bool = False + + +def get_auto_upgrade_settings() -> UserDataAutoUpgradeSettings: + """Get the auto-upgrade settings""" + with ReadUserData() as user_data: + if "autoUpgrade" in user_data: + return UserDataAutoUpgradeSettings(**user_data["autoUpgrade"]) + return UserDataAutoUpgradeSettings() + + +def set_auto_upgrade_settings( + enalbe: Optional[bool] = None, allowReboot: Optional[bool] = None +) -> None: + """Set the auto-upgrade settings""" + with WriteUserData() as user_data: + if "autoUpgrade" not in user_data: + user_data["autoUpgrade"] = {} + if enalbe is not None: + user_data["autoUpgrade"]["enable"] = enalbe + if allowReboot is not None: + user_data["autoUpgrade"]["allowReboot"] = allowReboot + + +def rebuild_system() -> 
int: + """Rebuild the system""" + rebuild_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True + ) + rebuild_result.communicate()[0] + return rebuild_result.returncode + + +def rollback_system() -> int: + """Rollback the system""" + rollback_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True + ) + rollback_result.communicate()[0] + return rollback_result.returncode + + +def upgrade_system() -> int: + """Upgrade the system""" + upgrade_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True + ) + upgrade_result.communicate()[0] + return upgrade_result.returncode + + +def reboot_system() -> None: + """Reboot the system""" + subprocess.Popen(["reboot"], start_new_session=True) + + +def get_system_version() -> str: + """Get system version""" + return subprocess.check_output(["uname", "-a"]).decode("utf-8").strip() + + +def get_python_version() -> str: + """Get Python version""" + return subprocess.check_output(["python", "-V"]).decode("utf-8").strip() + + +class SystemActionResult(BaseModel): + """System action result""" + + status: int + message: str + data: str + + +def pull_repository_changes() -> SystemActionResult: + """Pull repository changes""" + git_pull_command = ["git", "pull"] + + current_working_directory = os.getcwd() + os.chdir("/etc/nixos") + + git_pull_process_descriptor = subprocess.Popen( + git_pull_command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + shell=False, + ) + + data = git_pull_process_descriptor.communicate()[0].decode("utf-8") + + os.chdir(current_working_directory) + + if git_pull_process_descriptor.returncode == 0: + return SystemActionResult( + status=0, + message="Pulled repository changes", + data=data, + ) + return SystemActionResult( + status=git_pull_process_descriptor.returncode, + message="Failed to pull repository changes", + data=data, + ) diff --git 
a/selfprivacy_api/actions/users.py b/selfprivacy_api/actions/users.py new file mode 100644 index 0000000..bfc1756 --- /dev/null +++ b/selfprivacy_api/actions/users.py @@ -0,0 +1,219 @@ +"""Actions to manage the users.""" +import re +from typing import Optional +from pydantic import BaseModel +from enum import Enum +from selfprivacy_api.utils import ( + ReadUserData, + WriteUserData, + hash_password, + is_username_forbidden, +) + + +class UserDataUserOrigin(Enum): + """Origin of the user in the user data""" + + NORMAL = "NORMAL" + PRIMARY = "PRIMARY" + ROOT = "ROOT" + + +class UserDataUser(BaseModel): + """The user model from the userdata file""" + + username: str + ssh_keys: list[str] + origin: UserDataUserOrigin + + +def ensure_ssh_and_users_fields_exist(data): + if "ssh" not in data: + data["ssh"] = {} + data["ssh"]["rootKeys"] = [] + + elif data["ssh"].get("rootKeys") is None: + data["ssh"]["rootKeys"] = [] + + if "sshKeys" not in data: + data["sshKeys"] = [] + + if "users" not in data: + data["users"] = [] + + +def get_users( + exclude_primary: bool = False, + exclude_root: bool = False, +) -> list[UserDataUser]: + """Get the list of users""" + users = [] + with ReadUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + users = [ + UserDataUser( + username=user["username"], + ssh_keys=user.get("sshKeys", []), + origin=UserDataUserOrigin.NORMAL, + ) + for user in user_data["users"] + ] + if not exclude_primary: + users.append( + UserDataUser( + username=user_data["username"], + ssh_keys=user_data["sshKeys"], + origin=UserDataUserOrigin.PRIMARY, + ) + ) + if not exclude_root: + users.append( + UserDataUser( + username="root", + ssh_keys=user_data["ssh"]["rootKeys"], + origin=UserDataUserOrigin.ROOT, + ) + ) + return users + + +class UsernameForbidden(Exception): + """Attemted to create a user with a forbidden username""" + + pass + + +class UserAlreadyExists(Exception): + """Attemted to create a user that already exists""" + + pass + + +class 
UsernameNotAlphanumeric(Exception): + """Attemted to create a user with a non-alphanumeric username""" + + pass + + +class UsernameTooLong(Exception): + """Attemted to create a user with a too long username. Username must be less than 32 characters""" + + pass + + +class PasswordIsEmpty(Exception): + """Attemted to create a user with an empty password""" + + pass + + +def create_user(username: str, password: str): + if password == "": + raise PasswordIsEmpty("Password is empty") + + if is_username_forbidden(username): + raise UsernameForbidden("Username is forbidden") + + if not re.match(r"^[a-z_][a-z0-9_]+$", username): + raise UsernameNotAlphanumeric( + "Username must be alphanumeric and start with a letter" + ) + + if len(username) >= 32: + raise UsernameTooLong("Username must be less than 32 characters") + + with ReadUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + if username == user_data["username"]: + raise UserAlreadyExists("User already exists") + if username in [user["username"] for user in user_data["users"]]: + raise UserAlreadyExists("User already exists") + + hashed_password = hash_password(password) + + with WriteUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + + user_data["users"].append( + {"username": username, "sshKeys": [], "hashedPassword": hashed_password} + ) + + +class UserNotFound(Exception): + """Attemted to get a user that does not exist""" + + pass + + +class UserIsProtected(Exception): + """Attemted to delete a user that is protected""" + + pass + + +def delete_user(username: str): + with WriteUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + if username == user_data["username"] or username == "root": + raise UserIsProtected("Cannot delete main or root user") + + for data_user in user_data["users"]: + if data_user["username"] == username: + user_data["users"].remove(data_user) + break + else: + raise UserNotFound("User did not exist") + + +def 
update_user(username: str, password: str): + if password == "": + raise PasswordIsEmpty("Password is empty") + + hashed_password = hash_password(password) + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == data["username"]: + data["hashedMasterPassword"] = hashed_password + + # Return 404 if user does not exist + else: + for data_user in data["users"]: + if data_user["username"] == username: + data_user["hashedPassword"] = hashed_password + break + else: + raise UserNotFound("User does not exist") + + +def get_user_by_username(username: str) -> Optional[UserDataUser]: + with ReadUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == "root": + return UserDataUser( + origin=UserDataUserOrigin.ROOT, + username="root", + ssh_keys=data["ssh"]["rootKeys"], + ) + + if username == data["username"]: + return UserDataUser( + origin=UserDataUserOrigin.PRIMARY, + username=username, + ssh_keys=data["sshKeys"], + ) + + for user in data["users"]: + if user["username"] == username: + if "sshKeys" not in user: + user["sshKeys"] = [] + + return UserDataUser( + origin=UserDataUserOrigin.NORMAL, + username=username, + ssh_keys=user["sshKeys"], + ) + + return None diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 15142f0..3436445 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -1,110 +1,56 @@ #!/usr/bin/env python3 """SelfPrivacy server management API""" -import os -from gevent import monkey +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from strawberry.fastapi import GraphQLRouter +import uvicorn -from flask import Flask, request, jsonify -from flask_restful import Api -from flask_swagger import swagger -from flask_swagger_ui import get_swaggerui_blueprint -from flask_cors import CORS - -from strawberry.flask.views import AsyncGraphQLView - -from selfprivacy_api.resources.users import User, Users -from selfprivacy_api.resources.common import 
ApiVersion -from selfprivacy_api.resources.system import api_system -from selfprivacy_api.resources.services import services as api_services -from selfprivacy_api.resources.api_auth import auth as api_auth - -from selfprivacy_api.restic_controller.tasks import huey, init_restic - -from selfprivacy_api.migrations import run_migrations - -from selfprivacy_api.utils.auth import is_token_valid - +from selfprivacy_api.dependencies import get_api_version from selfprivacy_api.graphql.schema import schema +from selfprivacy_api.migrations import run_migrations +from selfprivacy_api.restic_controller.tasks import init_restic -swagger_blueprint = get_swaggerui_blueprint( - "/api/docs", "/api/swagger.json", config={"app_name": "SelfPrivacy API"} +from selfprivacy_api.rest import ( + system, + users, + api_auth, + services, +) + +app = FastAPI() + +graphql_app = GraphQLRouter( + schema, +) + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], ) -def create_app(test_config=None): - """Initiate Flask app and bind routes""" - app = Flask(__name__) - api = Api(app) - CORS(app) +app.include_router(system.router) +app.include_router(users.router) +app.include_router(api_auth.router) +app.include_router(services.router) +app.include_router(graphql_app, prefix="/graphql") - if test_config is None: - app.config["ENABLE_SWAGGER"] = os.environ.get("ENABLE_SWAGGER", "0") - app.config["B2_BUCKET"] = os.environ.get("B2_BUCKET") - else: - app.config.update(test_config) - # Check bearer token - @app.before_request - def check_auth(): - # Exclude swagger-ui, /auth/new_device/authorize, /auth/recovery_token/use - if request.path.startswith("/api"): - pass - elif request.path.startswith("/auth/new_device/authorize"): - pass - elif request.path.startswith("/auth/recovery_token/use"): - pass - elif request.path.startswith("/graphql"): - pass - else: - auth = request.headers.get("Authorization") - if auth is None: - 
return jsonify({"error": "Missing Authorization header"}), 401 - # Strip Bearer from auth header - auth = auth.replace("Bearer ", "") - if not is_token_valid(auth): - return jsonify({"error": "Invalid token"}), 401 +@app.get("/api/version") +async def get_version(): + """Get the version of the server""" + return {"version": get_api_version()} - api.add_resource(ApiVersion, "/api/version") - api.add_resource(Users, "/users") - api.add_resource(User, "/users/") - app.register_blueprint(api_system) - app.register_blueprint(api_services) - app.register_blueprint(api_auth) - - @app.route("/api/swagger.json") - def spec(): - if app.config["ENABLE_SWAGGER"] == "1": - swag = swagger(app) - swag["info"]["version"] = "1.2.7" - swag["info"]["title"] = "SelfPrivacy API" - swag["info"]["description"] = "SelfPrivacy API" - swag["securityDefinitions"] = { - "bearerAuth": { - "type": "apiKey", - "name": "Authorization", - "in": "header", - } - } - swag["security"] = [{"bearerAuth": []}] - - return jsonify(swag) - return jsonify({}), 404 - - app.add_url_rule( - "/graphql", view_func=AsyncGraphQLView.as_view("graphql", schema=schema) - ) - - if app.config["ENABLE_SWAGGER"] == "1": - app.register_blueprint(swagger_blueprint, url_prefix="/api/docs") - - return app +@app.on_event("startup") +async def startup(): + run_migrations() + init_restic() if __name__ == "__main__": - monkey.patch_all() - created_app = create_app() - run_migrations() - huey.start() - init_restic() - created_app.run(port=5050, debug=False) + uvicorn.run("selfprivacy_api.app:app", host="0.0.0.0", port=5050, log_level="info") diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py new file mode 100644 index 0000000..109e2ce --- /dev/null +++ b/selfprivacy_api/dependencies.py @@ -0,0 +1,30 @@ +from fastapi import Depends, HTTPException, status +from fastapi.security import APIKeyHeader +from pydantic import BaseModel + +from selfprivacy_api.utils.auth import is_token_valid + + +class 
TokenHeader(BaseModel): + token: str + + +async def get_token_header( + token: str = Depends(APIKeyHeader(name="Authorization", auto_error=False)) +) -> TokenHeader: + if token is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Token not provided" + ) + else: + token = token.replace("Bearer ", "") + if not is_token_valid(token): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token" + ) + return TokenHeader(token=token) + + +def get_api_version() -> str: + """Get API version""" + return "2.0.0" diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py index 5e332f3..7372197 100644 --- a/selfprivacy_api/graphql/__init__.py +++ b/selfprivacy_api/graphql/__init__.py @@ -3,7 +3,6 @@ import typing from strawberry.permission import BasePermission from strawberry.types import Info -from flask import request from selfprivacy_api.utils.auth import is_token_valid @@ -14,11 +13,9 @@ class IsAuthenticated(BasePermission): message = "You must be authenticated to access this resource." 
def has_permission(self, source: typing.Any, info: Info, **kwargs) -> bool: - auth = request.headers.get("Authorization") - if auth is None: + token = info.context["request"].headers.get("Authorization") + if token is None: + token = info.context["request"].query_params.get("token") + if token is None: return False - # Strip Bearer from auth header - auth = auth.replace("Bearer ", "") - if not is_token_valid(auth): - return False - return True + return is_token_valid(token.replace("Bearer ", "")) diff --git a/selfprivacy_api/graphql/common_types/dns.py b/selfprivacy_api/graphql/common_types/dns.py new file mode 100644 index 0000000..c9f8413 --- /dev/null +++ b/selfprivacy_api/graphql/common_types/dns.py @@ -0,0 +1,13 @@ +import typing +import strawberry + + +@strawberry.type +class DnsRecord: + """DNS record""" + + record_type: str + name: str + content: str + ttl: int + priority: typing.Optional[int] diff --git a/selfprivacy_api/graphql/common_types/jobs.py b/selfprivacy_api/graphql/common_types/jobs.py new file mode 100644 index 0000000..4b095c8 --- /dev/null +++ b/selfprivacy_api/graphql/common_types/jobs.py @@ -0,0 +1,49 @@ +"""Jobs status""" +# pylint: disable=too-few-public-methods +import datetime +import typing +import strawberry + +from selfprivacy_api.jobs import Job, Jobs + + +@strawberry.type +class ApiJob: + """Job type for GraphQL.""" + + uid: str + name: str + description: str + status: str + status_text: typing.Optional[str] + progress: typing.Optional[int] + created_at: datetime.datetime + updated_at: datetime.datetime + finished_at: typing.Optional[datetime.datetime] + error: typing.Optional[str] + result: typing.Optional[str] + + +def job_to_api_job(job: Job) -> ApiJob: + """Convert a Job from jobs controller to a GraphQL ApiJob.""" + return ApiJob( + uid=str(job.uid), + name=job.name, + description=job.description, + status=job.status.name, + status_text=job.status_text, + progress=job.progress, + created_at=job.created_at, + 
updated_at=job.updated_at, + finished_at=job.finished_at, + error=job.error, + result=job.result, + ) + + +def get_api_job_by_id(job_id: str) -> typing.Optional[ApiJob]: + """Get a job for GraphQL by its ID.""" + job = Jobs.get_instance().get_job(job_id) + if job is None: + return None + return job_to_api_job(job) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py new file mode 100644 index 0000000..c1246ca --- /dev/null +++ b/selfprivacy_api/graphql/common_types/service.py @@ -0,0 +1,146 @@ +from enum import Enum +import typing +import strawberry +from selfprivacy_api.graphql.common_types.dns import DnsRecord + +from selfprivacy_api.services import get_service_by_id, get_services_by_location +from selfprivacy_api.services import Service as ServiceInterface +from selfprivacy_api.utils.block_devices import BlockDevices + + +def get_usages(root: "StorageVolume") -> list["StorageUsageInterface"]: + """Get usages of a volume""" + return [ + ServiceStorageUsage( + service=service_to_graphql_service(service), + title=service.get_display_name(), + used_space=str(service.get_storage_usage()), + volume=get_volume_by_id(service.get_location()), + ) + for service in get_services_by_location(root.name) + ] + + +@strawberry.type +class StorageVolume: + """Stats and basic info about a volume or a system disk.""" + + total_space: str + free_space: str + used_space: str + root: bool + name: str + model: typing.Optional[str] + serial: typing.Optional[str] + type: str + + @strawberry.field + def usages(self) -> list["StorageUsageInterface"]: + """Get usages of a volume""" + return get_usages(self) + + +@strawberry.interface +class StorageUsageInterface: + used_space: str + volume: typing.Optional[StorageVolume] + title: str + + +@strawberry.type +class ServiceStorageUsage(StorageUsageInterface): + """Storage usage for a service""" + + service: typing.Optional["Service"] + + +@strawberry.enum +class ServiceStatusEnum(Enum): 
+ ACTIVE = "ACTIVE" + RELOADING = "RELOADING" + INACTIVE = "INACTIVE" + FAILED = "FAILED" + ACTIVATING = "ACTIVATING" + DEACTIVATING = "DEACTIVATING" + OFF = "OFF" + + +def get_storage_usage(root: "Service") -> ServiceStorageUsage: + """Get storage usage for a service""" + service = get_service_by_id(root.id) + if service is None: + return ServiceStorageUsage( + service=service, + title="Not found", + used_space="0", + volume=get_volume_by_id("sda1"), + ) + return ServiceStorageUsage( + service=service_to_graphql_service(service), + title=service.get_display_name(), + used_space=str(service.get_storage_usage()), + volume=get_volume_by_id(service.get_location()), + ) + + +@strawberry.type +class Service: + id: str + display_name: str + description: str + svg_icon: str + is_movable: bool + is_required: bool + is_enabled: bool + status: ServiceStatusEnum + url: typing.Optional[str] + dns_records: typing.Optional[typing.List[DnsRecord]] + + @strawberry.field + def storage_usage(self) -> ServiceStorageUsage: + """Get storage usage for a service""" + return get_storage_usage(self) + + +def service_to_graphql_service(service: ServiceInterface) -> Service: + """Convert service to graphql service""" + return Service( + id=service.get_id(), + display_name=service.get_display_name(), + description=service.get_description(), + svg_icon=service.get_svg_icon(), + is_movable=service.is_movable(), + is_required=service.is_required(), + is_enabled=service.is_enabled(), + status=ServiceStatusEnum(service.get_status().value), + url=service.get_url(), + dns_records=[ + DnsRecord( + record_type=record.type, + name=record.name, + content=record.content, + ttl=record.ttl, + priority=record.priority, + ) + for record in service.get_dns_records() + ], + ) + + +def get_volume_by_id(volume_id: str) -> typing.Optional[StorageVolume]: + """Get volume by id""" + volume = BlockDevices().get_block_device(volume_id) + if volume is None: + return None + return StorageVolume( + 
total_space=str(volume.fssize) + if volume.fssize is not None + else str(volume.size), + free_space=str(volume.fsavail), + used_space=str(volume.fsused), + root=volume.name == "sda1", + name=volume.name, + model=volume.model, + serial=volume.serial, + type=volume.type, + ) diff --git a/selfprivacy_api/graphql/common_types/user.py b/selfprivacy_api/graphql/common_types/user.py index 8cc5f2c..26ad6f2 100644 --- a/selfprivacy_api/graphql/common_types/user.py +++ b/selfprivacy_api/graphql/common_types/user.py @@ -1,8 +1,8 @@ import typing from enum import Enum import strawberry +import selfprivacy_api.actions.users as users_actions -from selfprivacy_api.utils import ReadUserData from selfprivacy_api.graphql.mutations.mutation_interface import ( MutationReturnInterface, ) @@ -28,51 +28,30 @@ class User: class UserMutationReturn(MutationReturnInterface): """Return type for user mutation""" - user: typing.Optional[User] - - -def ensure_ssh_and_users_fields_exist(data): - if "ssh" not in data: - data["ssh"] = [] - data["ssh"]["rootKeys"] = [] - - elif data["ssh"].get("rootKeys") is None: - data["ssh"]["rootKeys"] = [] - - if "sshKeys" not in data: - data["sshKeys"] = [] - - if "users" not in data: - data["users"] = [] + user: typing.Optional[User] = None def get_user_by_username(username: str) -> typing.Optional[User]: - with ReadUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == "root": - return User( - user_type=UserType.ROOT, - username="root", - ssh_keys=data["ssh"]["rootKeys"], - ) - - if username == data["username"]: - return User( - user_type=UserType.PRIMARY, - username=username, - ssh_keys=data["sshKeys"], - ) - - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - - return User( - user_type=UserType.NORMAL, - username=username, - ssh_keys=user["sshKeys"], - ) + user = users_actions.get_user_by_username(username) + if user is None: return None + + return User( + 
user_type=UserType(user.origin.value), + username=user.username, + ssh_keys=user.ssh_keys, + ) + + +def get_users() -> typing.List[User]: + """Get users""" + users = users_actions.get_users(exclude_root=True) + return [ + User( + user_type=UserType(user.origin.value), + username=user.username, + ssh_keys=user.ssh_keys, + ) + for user in users + ] diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py index e0d1057..c6727db 100644 --- a/selfprivacy_api/graphql/mutations/api_mutations.py +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -2,8 +2,16 @@ # pylint: disable=too-few-public-methods import datetime import typing -from flask import request import strawberry +from strawberry.types import Info +from selfprivacy_api.actions.api_tokens import ( + CannotDeleteCallerException, + InvalidExpirationDate, + InvalidUsesLeft, + NotFoundException, + delete_api_token, + get_new_api_recovery_key, +) from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, @@ -12,11 +20,7 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( from selfprivacy_api.utils.auth import ( delete_new_device_auth_token, - delete_token, - generate_recovery_token, get_new_device_auth_token, - is_token_name_exists, - is_token_name_pair_valid, refresh_token, use_mnemonic_recoverery_token, use_new_device_auth_token, @@ -64,27 +68,24 @@ class ApiMutations: self, limits: typing.Optional[RecoveryKeyLimitsInput] = None ) -> ApiKeyMutationReturn: """Generate recovery key""" - if limits is not None: - if limits.expiration_date is not None: - if limits.expiration_date < datetime.datetime.now(): - return ApiKeyMutationReturn( - success=False, - message="Expiration date must be in the future", - code=400, - key=None, - ) - if limits.uses is not None: - if limits.uses < 1: - return ApiKeyMutationReturn( - success=False, - message="Uses must be 
greater than 0", - code=400, - key=None, - ) - if limits is not None: - key = generate_recovery_token(limits.expiration_date, limits.uses) - else: - key = generate_recovery_token(None, None) + if limits is None: + limits = RecoveryKeyLimitsInput() + try: + key = get_new_api_recovery_key(limits.expiration_date, limits.uses) + except InvalidExpirationDate: + return ApiKeyMutationReturn( + success=False, + message="Expiration date must be in the future", + code=400, + key=None, + ) + except InvalidUsesLeft: + return ApiKeyMutationReturn( + success=False, + message="Uses must be greater than 0", + code=400, + key=None, + ) return ApiKeyMutationReturn( success=True, message="Recovery key generated", @@ -113,12 +114,12 @@ class ApiMutations: ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def refresh_device_api_token(self) -> DeviceApiTokenMutationReturn: + def refresh_device_api_token(self, info: Info) -> DeviceApiTokenMutationReturn: """Refresh device api token""" token = ( - request.headers.get("Authorization").split(" ")[1] - if request.headers.get("Authorization") is not None - else None + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") ) if token is None: return DeviceApiTokenMutationReturn( @@ -143,26 +144,33 @@ class ApiMutations: ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def delete_device_api_token(self, device: str) -> GenericMutationReturn: + def delete_device_api_token(self, device: str, info: Info) -> GenericMutationReturn: """Delete device api token""" self_token = ( - request.headers.get("Authorization").split(" ")[1] - if request.headers.get("Authorization") is not None - else None + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") ) - if self_token is not None and is_token_name_pair_valid(device, self_token): - return GenericMutationReturn( - success=False, - message="Cannot delete caller's token", - code=400, - ) - if not 
is_token_name_exists(device): + try: + delete_api_token(self_token, device) + except NotFoundException: return GenericMutationReturn( success=False, message="Token not found", code=404, ) - delete_token(device) + except CannotDeleteCallerException: + return GenericMutationReturn( + success=False, + message="Cannot delete caller token", + code=400, + ) + except Exception as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=500, + ) return GenericMutationReturn( success=True, message="Token deleted", diff --git a/selfprivacy_api/graphql/mutations/job_mutations.py b/selfprivacy_api/graphql/mutations/job_mutations.py new file mode 100644 index 0000000..d3a3498 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/job_mutations.py @@ -0,0 +1,27 @@ +"""Manipulate jobs""" +# pylint: disable=too-few-public-methods +import strawberry + +from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn +from selfprivacy_api.jobs import Jobs + + +@strawberry.type +class JobMutations: + """Mutations related to jobs""" + + @strawberry.mutation + def remove_job(self, job_id: str) -> GenericMutationReturn: + """Remove a job from the queue""" + result = Jobs().remove_by_uuid(job_id) + if result: + return GenericMutationReturn( + success=True, + code=200, + message="Job removed", + ) + return GenericMutationReturn( + success=False, + code=404, + message="Job not found", + ) diff --git a/selfprivacy_api/graphql/mutations/mutation_interface.py b/selfprivacy_api/graphql/mutations/mutation_interface.py index 32146fc..33a6b02 100644 --- a/selfprivacy_api/graphql/mutations/mutation_interface.py +++ b/selfprivacy_api/graphql/mutations/mutation_interface.py @@ -1,4 +1,7 @@ import strawberry +import typing + +from selfprivacy_api.graphql.common_types.jobs import ApiJob @strawberry.interface @@ -11,3 +14,8 @@ class MutationReturnInterface: @strawberry.type class GenericMutationReturn(MutationReturnInterface): pass + + +@strawberry.type +class 
GenericJobButationReturn(MutationReturnInterface): + job: typing.Optional[ApiJob] = None diff --git a/selfprivacy_api/graphql/mutations/services_mutations.py b/selfprivacy_api/graphql/mutations/services_mutations.py new file mode 100644 index 0000000..38a0d7f --- /dev/null +++ b/selfprivacy_api/graphql/mutations/services_mutations.py @@ -0,0 +1,169 @@ +"""Services mutations""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.common_types.jobs import job_to_api_job + +from selfprivacy_api.graphql.common_types.service import ( + Service, + service_to_graphql_service, +) +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericJobButationReturn, + GenericMutationReturn, +) + +from selfprivacy_api.services import get_service_by_id +from selfprivacy_api.utils.block_devices import BlockDevices + + +@strawberry.type +class ServiceMutationReturn(GenericMutationReturn): + """Service mutation return type.""" + + service: typing.Optional[Service] = None + + +@strawberry.input +class MoveServiceInput: + """Move service input type.""" + + service_id: str + location: str + + +@strawberry.type +class ServiceJobMutationReturn(GenericJobButationReturn): + """Service job mutation return type.""" + + service: typing.Optional[Service] = None + + +@strawberry.type +class ServicesMutations: + """Services mutations.""" + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def enable_service(self, service_id: str) -> ServiceMutationReturn: + """Enable service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.enable() + return ServiceMutationReturn( + success=True, + message="Service enabled.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def 
disable_service(self, service_id: str) -> ServiceMutationReturn: + """Disable service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.disable() + return ServiceMutationReturn( + success=True, + message="Service disabled.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def stop_service(self, service_id: str) -> ServiceMutationReturn: + """Stop service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.stop() + return ServiceMutationReturn( + success=True, + message="Service stopped.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def start_service(self, service_id: str) -> ServiceMutationReturn: + """Start service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.start() + return ServiceMutationReturn( + success=True, + message="Service started.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def restart_service(self, service_id: str) -> ServiceMutationReturn: + """Restart service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.restart() + return ServiceMutationReturn( + success=True, + message="Service restarted.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def move_service(self, input: MoveServiceInput) -> ServiceJobMutationReturn: + """Move 
service.""" + service = get_service_by_id(input.service_id) + if service is None: + return ServiceJobMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + if not service.is_movable(): + return ServiceJobMutationReturn( + success=False, + message="Service is not movable.", + code=400, + service=service_to_graphql_service(service), + ) + volume = BlockDevices().get_block_device(input.location) + if volume is None: + return ServiceJobMutationReturn( + success=False, + message="Volume not found.", + code=404, + service=service_to_graphql_service(service), + ) + job = service.move_to_volume(volume) + return ServiceJobMutationReturn( + success=True, + message="Service moved.", + code=200, + service=service_to_graphql_service(service), + job=job_to_api_job(job), + ) diff --git a/selfprivacy_api/graphql/mutations/ssh_mutations.py b/selfprivacy_api/graphql/mutations/ssh_mutations.py index b30f474..60f81a8 100644 --- a/selfprivacy_api/graphql/mutations/ssh_mutations.py +++ b/selfprivacy_api/graphql/mutations/ssh_mutations.py @@ -3,9 +3,13 @@ # pylint: disable=too-few-public-methods import strawberry +from selfprivacy_api.actions.users import UserNotFound from selfprivacy_api.graphql import IsAuthenticated -from selfprivacy_api.graphql.mutations.ssh_utils import ( +from selfprivacy_api.actions.ssh import ( + InvalidPublicKey, + KeyAlreadyExists, + KeyNotFound, create_ssh_key, remove_ssh_key, ) @@ -31,12 +35,37 @@ class SshMutations: def add_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: """Add a new ssh key""" - success, message, code = create_ssh_key(ssh_input.username, ssh_input.ssh_key) + try: + create_ssh_key(ssh_input.username, ssh_input.ssh_key) + except KeyAlreadyExists: + return UserMutationReturn( + success=False, + message="Key already exists", + code=409, + ) + except InvalidPublicKey: + return UserMutationReturn( + success=False, + message="Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported", + code=400, + ) + except UserNotFound: + return UserMutationReturn( + success=False, + message="User not found", + code=404, + ) + except Exception as e: + return UserMutationReturn( + success=False, + message=str(e), + code=500, + ) return UserMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="New SSH key successfully written", + code=201, user=get_user_by_username(ssh_input.username), ) @@ -44,11 +73,30 @@ class SshMutations: def remove_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: """Remove ssh key from user""" - success, message, code = remove_ssh_key(ssh_input.username, ssh_input.ssh_key) + try: + remove_ssh_key(ssh_input.username, ssh_input.ssh_key) + except KeyNotFound: + return UserMutationReturn( + success=False, + message="Key not found", + code=404, + ) + except UserNotFound: + return UserMutationReturn( + success=False, + message="User not found", + code=404, + ) + except Exception as e: + return UserMutationReturn( + success=False, + message=str(e), + code=500, + ) return UserMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="SSH key successfully removed", + code=200, user=get_user_by_username(ssh_input.username), ) diff --git a/selfprivacy_api/graphql/mutations/ssh_utils.py b/selfprivacy_api/graphql/mutations/ssh_utils.py deleted file mode 100644 index 3dbc152..0000000 --- a/selfprivacy_api/graphql/mutations/ssh_utils.py +++ /dev/null @@ -1,74 +0,0 @@ -from selfprivacy_api.graphql.common_types.user import ensure_ssh_and_users_fields_exist -from selfprivacy_api.utils import ( - WriteUserData, - validate_ssh_public_key, -) - - -def create_ssh_key(username: str, ssh_key: str) -> tuple[bool, str, int]: - """Create a new ssh key""" - - if not validate_ssh_public_key(ssh_key): - return ( - False, - "Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported", - 400, - ) - - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == data["username"]: - if ssh_key in data["sshKeys"]: - return False, "Key already exists", 409 - - data["sshKeys"].append(ssh_key) - return True, "New SSH key successfully written", 201 - - if username == "root": - if ssh_key in data["ssh"]["rootKeys"]: - return False, "Key already exists", 409 - - data["ssh"]["rootKeys"].append(ssh_key) - return True, "New SSH key successfully written", 201 - - for user in data["users"]: - if user["username"] == username: - if ssh_key in user["sshKeys"]: - return False, "Key already exists", 409 - - user["sshKeys"].append(ssh_key) - return True, "New SSH key successfully written", 201 - - return False, "User not found", 404 - - -def remove_ssh_key(username: str, ssh_key: str) -> tuple[bool, str, int]: - """Delete a ssh key""" - - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == "root": - if ssh_key in data["ssh"]["rootKeys"]: - data["ssh"]["rootKeys"].remove(ssh_key) - return True, "SSH key deleted", 200 - - return False, "Key not found", 404 - - if username == data["username"]: - if ssh_key in data["sshKeys"]: - data["sshKeys"].remove(ssh_key) - return True, "SSH key deleted", 200 - - return False, "Key not found", 404 - - for user in data["users"]: - if user["username"] == username: - if ssh_key in user["sshKeys"]: - user["sshKeys"].remove(ssh_key) - return True, "SSH key deleted", 200 - - return False, "Key not found", 404 - - return False, "User not found", 404 diff --git a/selfprivacy_api/graphql/mutations/storage_mutation.py b/selfprivacy_api/graphql/mutations/storage_mutations.py similarity index 61% rename from selfprivacy_api/graphql/mutations/storage_mutation.py rename to selfprivacy_api/graphql/mutations/storage_mutations.py index ff69aea..1b6d74e 100644 --- a/selfprivacy_api/graphql/mutations/storage_mutation.py +++ 
b/selfprivacy_api/graphql/mutations/storage_mutations.py @@ -1,11 +1,28 @@ """Storage devices mutations""" -import typing import strawberry from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.common_types.jobs import job_to_api_job from selfprivacy_api.utils.block_devices import BlockDevices from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericJobButationReturn, GenericMutationReturn, ) +from selfprivacy_api.jobs.migrate_to_binds import ( + BindMigrationConfig, + is_bind_migrated, + start_bind_migration, +) + + +@strawberry.input +class MigrateToBindsInput: + """Migrate to binds input""" + + email_block_device: str + bitwarden_block_device: str + gitea_block_device: str + nextcloud_block_device: str + pleroma_block_device: str @strawberry.type @@ -60,3 +77,26 @@ class StorageMutations: return GenericMutationReturn( success=False, code=409, message="Volume not unmounted (already unmounted?)" ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def migrate_to_binds(self, input: MigrateToBindsInput) -> GenericJobButationReturn: + """Migrate to binds""" + if is_bind_migrated(): + return GenericJobButationReturn( + success=False, code=409, message="Already migrated to binds" + ) + job = start_bind_migration( + BindMigrationConfig( + email_block_device=input.email_block_device, + bitwarden_block_device=input.bitwarden_block_device, + gitea_block_device=input.gitea_block_device, + nextcloud_block_device=input.nextcloud_block_device, + pleroma_block_device=input.pleroma_block_device, + ) + ) + return GenericJobButationReturn( + success=True, + code=200, + message="Migration to binds started, rebuild the system to apply changes", + job=job_to_api_job(job), + ) diff --git a/selfprivacy_api/graphql/mutations/system_mutations.py b/selfprivacy_api/graphql/mutations/system_mutations.py index 057c26f..daada17 100644 --- a/selfprivacy_api/graphql/mutations/system_mutations.py +++ 
b/selfprivacy_api/graphql/mutations/system_mutations.py @@ -1,15 +1,14 @@ """System management mutations""" # pylint: disable=too-few-public-methods -import subprocess import typing -import pytz import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, MutationReturnInterface, ) -from selfprivacy_api.utils import WriteUserData + +import selfprivacy_api.actions.system as system_actions @strawberry.type @@ -42,15 +41,15 @@ class SystemMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def change_timezone(self, timezone: str) -> TimezoneMutationReturn: """Change the timezone of the server. Timezone is a tzdatabase name.""" - if timezone not in pytz.all_timezones: + try: + system_actions.change_timezone(timezone) + except system_actions.InvalidTimezone as e: return TimezoneMutationReturn( success=False, - message="Invalid timezone", + message=str(e), code=400, timezone=None, ) - with WriteUserData() as data: - data["timezone"] = timezone return TimezoneMutationReturn( success=True, message="Timezone changed", @@ -63,36 +62,23 @@ class SystemMutations: self, settings: AutoUpgradeSettingsInput ) -> AutoUpgradeSettingsMutationReturn: """Change auto upgrade settings of the server.""" - with WriteUserData() as data: - if "autoUpgrade" not in data: - data["autoUpgrade"] = {} - if "enable" not in data["autoUpgrade"]: - data["autoUpgrade"]["enable"] = True - if "allowReboot" not in data["autoUpgrade"]: - data["autoUpgrade"]["allowReboot"] = False + system_actions.set_auto_upgrade_settings( + settings.enableAutoUpgrade, settings.allowReboot + ) - if settings.enableAutoUpgrade is not None: - data["autoUpgrade"]["enable"] = settings.enableAutoUpgrade - if settings.allowReboot is not None: - data["autoUpgrade"]["allowReboot"] = settings.allowReboot - - auto_upgrade = data["autoUpgrade"]["enable"] - allow_reboot = data["autoUpgrade"]["allowReboot"] + new_settings 
= system_actions.get_auto_upgrade_settings() return AutoUpgradeSettingsMutationReturn( success=True, message="Auto-upgrade settings changed", code=200, - enableAutoUpgrade=auto_upgrade, - allowReboot=allow_reboot, + enableAutoUpgrade=new_settings.enable, + allowReboot=new_settings.allowReboot, ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_rebuild(self) -> GenericMutationReturn: - rebuild_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True - ) - rebuild_result.communicate()[0] + system_actions.rebuild_system() return GenericMutationReturn( success=True, message="Starting rebuild system", @@ -101,10 +87,7 @@ class SystemMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_rollback(self) -> GenericMutationReturn: - rollback_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True - ) - rollback_result.communicate()[0] + system_actions.rollback_system() return GenericMutationReturn( success=True, message="Starting rebuild system", @@ -113,10 +96,7 @@ class SystemMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_upgrade(self) -> GenericMutationReturn: - upgrade_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True - ) - upgrade_result.communicate()[0] + system_actions.upgrade_system() return GenericMutationReturn( success=True, message="Starting rebuild system", @@ -125,9 +105,24 @@ class SystemMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def reboot_system(self) -> GenericMutationReturn: - subprocess.Popen(["reboot"], start_new_session=True) + system_actions.reboot_system() return GenericMutationReturn( success=True, message="System reboot has started", code=200, ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def pull_repository_changes(self) -> GenericMutationReturn: + result = 
system_actions.pull_repository_changes() + if result.status == 0: + return GenericMutationReturn( + success=True, + message="Repository changes pulled", + code=200, + ) + return GenericMutationReturn( + success=False, + message=f"Failed to pull repository changes:\n{result.data}", + code=500, + ) diff --git a/selfprivacy_api/graphql/mutations/users_mutations.py b/selfprivacy_api/graphql/mutations/users_mutations.py index a284ff2..27be1d7 100644 --- a/selfprivacy_api/graphql/mutations/users_mutations.py +++ b/selfprivacy_api/graphql/mutations/users_mutations.py @@ -10,11 +10,7 @@ from selfprivacy_api.graphql.common_types.user import ( from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, ) -from selfprivacy_api.graphql.mutations.users_utils import ( - create_user, - delete_user, - update_user, -) +import selfprivacy_api.actions.users as users_actions @strawberry.input @@ -31,35 +27,91 @@ class UserMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def create_user(self, user: UserMutationInput) -> UserMutationReturn: - - success, message, code = create_user(user.username, user.password) + try: + users_actions.create_user(user.username, user.password) + except users_actions.PasswordIsEmpty as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UsernameForbidden as e: + return UserMutationReturn( + success=False, + message=str(e), + code=409, + ) + except users_actions.UsernameNotAlphanumeric as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UsernameTooLong as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UserAlreadyExists as e: + return UserMutationReturn( + success=False, + message=str(e), + code=409, + user=get_user_by_username(user.username), + ) return UserMutationReturn( - success=success, - message=message, - code=code, + success=True, + 
message="User created", + code=201, user=get_user_by_username(user.username), ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def delete_user(self, username: str) -> GenericMutationReturn: - success, message, code = delete_user(username) + try: + users_actions.delete_user(username) + except users_actions.UserNotFound as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=404, + ) + except users_actions.UserIsProtected as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=400, + ) return GenericMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="User deleted", + code=200, ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def update_user(self, user: UserMutationInput) -> UserMutationReturn: """Update user mutation""" - - success, message, code = update_user(user.username, user.password) + try: + users_actions.update_user(user.username, user.password) + except users_actions.PasswordIsEmpty as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UserNotFound as e: + return UserMutationReturn( + success=False, + message=str(e), + code=404, + ) return UserMutationReturn( - success=success, - message=message, - code=code, + success=True, + message="User updated", + code=200, user=get_user_by_username(user.username), ) diff --git a/selfprivacy_api/graphql/mutations/users_utils.py b/selfprivacy_api/graphql/mutations/users_utils.py deleted file mode 100644 index f649b45..0000000 --- a/selfprivacy_api/graphql/mutations/users_utils.py +++ /dev/null @@ -1,111 +0,0 @@ -import re -from selfprivacy_api.utils import ( - WriteUserData, - ReadUserData, - is_username_forbidden, -) -from selfprivacy_api.utils import hash_password - - -def ensure_ssh_and_users_fields_exist(data): - if "ssh" not in data: - data["ssh"] = [] - data["ssh"]["rootKeys"] = [] - - elif data["ssh"].get("rootKeys") is None: - 
data["ssh"]["rootKeys"] = [] - - if "sshKeys" not in data: - data["sshKeys"] = [] - - if "users" not in data: - data["users"] = [] - - -def create_user(username: str, password: str) -> tuple[bool, str, int]: - """Create a new user""" - - # Check if password is null or none - if password == "": - return False, "Password is null", 400 - - # Check if username is forbidden - if is_username_forbidden(username): - return False, "Username is forbidden", 409 - - # Check is username passes regex - if not re.match(r"^[a-z_][a-z0-9_]+$", username): - return False, "Username must be alphanumeric", 400 - - # Check if username less than 32 characters - if len(username) >= 32: - return False, "Username must be less than 32 characters", 400 - - with ReadUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - # Return 409 if user already exists - if data["username"] == username: - return False, "User already exists", 409 - - for data_user in data["users"]: - if data_user["username"] == username: - return False, "User already exists", 409 - - hashed_password = hash_password(password) - - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - data["users"].append( - { - "username": username, - "hashedPassword": hashed_password, - "sshKeys": [], - } - ) - - return True, "User was successfully created!", 201 - - -def delete_user(username: str) -> tuple[bool, str, int]: - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == data["username"] or username == "root": - return False, "Cannot delete main or root user", 400 - - # Return 404 if user does not exist - for data_user in data["users"]: - if data_user["username"] == username: - data["users"].remove(data_user) - break - else: - return False, "User does not exist", 404 - - return True, "User was deleted", 200 - - -def update_user(username: str, password: str) -> tuple[bool, str, int]: - # Check if password is null or none - if password == "": - return False, "Password 
is null", 400 - - hashed_password = hash_password(password) - - with WriteUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - if username == data["username"]: - data["hashedMasterPassword"] = hashed_password - - # Return 404 if user does not exist - else: - for data_user in data["users"]: - if data_user["username"] == username: - data_user["hashedPassword"] = hashed_password - break - else: - return False, "User does not exist", 404 - - return True, "User was successfully updated", 200 diff --git a/selfprivacy_api/graphql/queries/api_queries.py b/selfprivacy_api/graphql/queries/api_queries.py index b2a81d2..7994a8f 100644 --- a/selfprivacy_api/graphql/queries/api_queries.py +++ b/selfprivacy_api/graphql/queries/api_queries.py @@ -2,26 +2,23 @@ # pylint: disable=too-few-public-methods import datetime import typing -from flask import request import strawberry +from strawberry.types import Info +from selfprivacy_api.actions.api_tokens import get_api_tokens_with_caller_flag from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.utils import parse_date +from selfprivacy_api.dependencies import get_api_version as get_api_version_dependency from selfprivacy_api.utils.auth import ( get_recovery_token_status, - get_tokens_info, is_recovery_token_exists, is_recovery_token_valid, - is_token_name_exists, - is_token_name_pair_valid, - refresh_token, - get_token_name, ) def get_api_version() -> str: """Get API version""" - return "1.2.7" + return get_api_version_dependency() @strawberry.type @@ -33,24 +30,6 @@ class ApiDevice: is_caller: bool -def get_devices() -> typing.List[ApiDevice]: - """Get list of devices""" - caller_name = get_token_name( - request.headers.get("Authorization").split(" ")[1] - if request.headers.get("Authorization") is not None - else None - ) - tokens = get_tokens_info() - return [ - ApiDevice( - name=token["name"], - creation_date=parse_date(token["date"]), - is_caller=token["name"] == caller_name, - ) - for token in 
tokens - ] - - @strawberry.type class ApiRecoveryKeyStatus: """Recovery key status""" @@ -97,9 +76,22 @@ class Api: """API access status""" version: str = strawberry.field(resolver=get_api_version) - devices: typing.List[ApiDevice] = strawberry.field( - resolver=get_devices, permission_classes=[IsAuthenticated] - ) + + @strawberry.field(permission_classes=[IsAuthenticated]) + def devices(self, info: Info) -> typing.List[ApiDevice]: + return [ + ApiDevice( + name=device.name, + creation_date=device.date, + is_caller=device.is_caller, + ) + for device in get_api_tokens_with_caller_flag( + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") + ) + ] + recovery_key: ApiRecoveryKeyStatus = strawberry.field( resolver=get_recovery_key_status, permission_classes=[IsAuthenticated] ) diff --git a/selfprivacy_api/graphql/queries/jobs.py b/selfprivacy_api/graphql/queries/jobs.py new file mode 100644 index 0000000..426c563 --- /dev/null +++ b/selfprivacy_api/graphql/queries/jobs.py @@ -0,0 +1,25 @@ +"""Jobs status""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +from selfprivacy_api.graphql.common_types.jobs import ( + ApiJob, + get_api_job_by_id, + job_to_api_job, +) + +from selfprivacy_api.jobs import Jobs + + +@strawberry.type +class Job: + @strawberry.field + def get_jobs(self) -> typing.List[ApiJob]: + + Jobs.get_instance().get_jobs() + + return [job_to_api_job(job) for job in Jobs.get_instance().get_jobs()] + + @strawberry.field + def get_job(self, job_id: str) -> typing.Optional[ApiJob]: + return get_api_job_by_id(job_id) diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py index 774d465..6d0381e 100644 --- a/selfprivacy_api/graphql/queries/providers.py +++ b/selfprivacy_api/graphql/queries/providers.py @@ -1,7 +1,5 @@ """Enums representing different service providers.""" from enum import Enum -import datetime -import typing import strawberry diff --git 
a/selfprivacy_api/graphql/queries/services.py b/selfprivacy_api/graphql/queries/services.py new file mode 100644 index 0000000..5398f81 --- /dev/null +++ b/selfprivacy_api/graphql/queries/services.py @@ -0,0 +1,18 @@ +"""Services status""" +# pylint: disable=too-few-public-methods +import typing +import strawberry + +from selfprivacy_api.graphql.common_types.service import ( + Service, + service_to_graphql_service, +) +from selfprivacy_api.services import get_all_services + + +@strawberry.type +class Services: + @strawberry.field + def all_services(self) -> typing.List[Service]: + services = get_all_services() + return [service_to_graphql_service(service) for service in services] diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py index 6315b26..6800518 100644 --- a/selfprivacy_api/graphql/queries/storage.py +++ b/selfprivacy_api/graphql/queries/storage.py @@ -2,23 +2,13 @@ # pylint: disable=too-few-public-methods import typing import strawberry + +from selfprivacy_api.graphql.common_types.service import ( + StorageVolume, +) from selfprivacy_api.utils.block_devices import BlockDevices -@strawberry.type -class StorageVolume: - """Stats and basic info about a volume or a system disk.""" - - total_space: str - free_space: str - used_space: str - root: bool - name: str - model: typing.Optional[str] - serial: typing.Optional[str] - type: str - - @strawberry.type class Storage: """GraphQL queries to get storage information.""" diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index a235e4d..0e2a7ec 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -1,23 +1,18 @@ """Common system information and settings""" # pylint: disable=too-few-public-methods -import subprocess +import os import typing import strawberry +from selfprivacy_api.graphql.common_types.dns import DnsRecord from selfprivacy_api.graphql.queries.common 
import Alert, Severity from selfprivacy_api.graphql.queries.providers import DnsProvider, ServerProvider +from selfprivacy_api.jobs import Jobs +from selfprivacy_api.jobs.migrate_to_binds import is_bind_migrated +from selfprivacy_api.services import get_all_required_dns_records from selfprivacy_api.utils import ReadUserData - - -@strawberry.type -class DnsRecord: - """DNS record""" - - recordType: str - name: str - content: str - ttl: int - priority: typing.Optional[int] +import selfprivacy_api.actions.system as system_actions +import selfprivacy_api.actions.ssh as ssh_actions @strawberry.type @@ -27,7 +22,20 @@ class SystemDomainInfo: domain: str hostname: str provider: DnsProvider - required_dns_records: typing.List[DnsRecord] + + @strawberry.field + def required_dns_records(self) -> typing.List[DnsRecord]: + """Collect all required DNS records for all services""" + return [ + DnsRecord( + record_type=record.type, + name=record.name, + content=record.content, + ttl=record.ttl, + priority=record.priority, + ) + for record in get_all_required_dns_records() + ] def get_system_domain_info() -> SystemDomainInfo: @@ -37,8 +45,6 @@ def get_system_domain_info() -> SystemDomainInfo: domain=user_data["domain"], hostname=user_data["hostname"], provider=DnsProvider.CLOUDFLARE, - # TODO: get ip somehow - required_dns_records=[], ) @@ -52,17 +58,11 @@ class AutoUpgradeOptions: def get_auto_upgrade_options() -> AutoUpgradeOptions: """Get automatic upgrade options""" - with ReadUserData() as user_data: - if "autoUpgrade" not in user_data: - return AutoUpgradeOptions(enable=True, allow_reboot=False) - if "enable" not in user_data["autoUpgrade"]: - user_data["autoUpgrade"]["enable"] = True - if "allowReboot" not in user_data["autoUpgrade"]: - user_data["autoUpgrade"]["allowReboot"] = False - return AutoUpgradeOptions( - enable=user_data["autoUpgrade"]["enable"], - allow_reboot=user_data["autoUpgrade"]["allowReboot"], - ) + settings = system_actions.get_auto_upgrade_settings() + 
return AutoUpgradeOptions( + enable=settings.enable, + allow_reboot=settings.allowReboot, + ) @strawberry.type @@ -76,30 +76,17 @@ class SshSettings: def get_ssh_settings() -> SshSettings: """Get SSH settings""" - with ReadUserData() as user_data: - if "ssh" not in user_data: - return SshSettings( - enable=False, password_authentication=False, root_ssh_keys=[] - ) - if "enable" not in user_data["ssh"]: - user_data["ssh"]["enable"] = False - if "passwordAuthentication" not in user_data["ssh"]: - user_data["ssh"]["passwordAuthentication"] = False - if "rootKeys" not in user_data["ssh"]: - user_data["ssh"]["rootKeys"] = [] - return SshSettings( - enable=user_data["ssh"]["enable"], - password_authentication=user_data["ssh"]["passwordAuthentication"], - root_ssh_keys=user_data["ssh"]["rootKeys"], - ) + settings = ssh_actions.get_ssh_settings() + return SshSettings( + enable=settings.enable, + password_authentication=settings.passwordAuthentication, + root_ssh_keys=settings.rootKeys, + ) def get_system_timezone() -> str: """Get system timezone""" - with ReadUserData() as user_data: - if "timezone" not in user_data: - return "Europe/Uzhgorod" - return user_data["timezone"] + return system_actions.get_timezone() @strawberry.type @@ -115,12 +102,12 @@ class SystemSettings: def get_system_version() -> str: """Get system version""" - return subprocess.check_output(["uname", "-a"]).decode("utf-8").strip() + return system_actions.get_system_version() def get_python_version() -> str: """Get Python version""" - return subprocess.check_output(["python", "-V"]).decode("utf-8").strip() + return system_actions.get_python_version() @strawberry.type @@ -130,6 +117,11 @@ class SystemInfo: system_version: str = strawberry.field(resolver=get_system_version) python_version: str = strawberry.field(resolver=get_python_version) + @strawberry.field + def using_binds(self) -> bool: + """Check if the system is using BINDs""" + return is_bind_migrated() + @strawberry.type class 
SystemProviderInfo: @@ -162,4 +154,13 @@ class System: settings: SystemSettings = SystemSettings() info: SystemInfo = SystemInfo() provider: SystemProviderInfo = strawberry.field(resolver=get_system_provider_info) - busy: bool = False + + @strawberry.field + def busy(self) -> bool: + """Check if the system is busy""" + return Jobs.is_busy() + + @strawberry.field + def working_directory(self) -> str: + """Get working directory""" + return os.getcwd() diff --git a/selfprivacy_api/graphql/queries/users.py b/selfprivacy_api/graphql/queries/users.py index fc18a84..d2c0555 100644 --- a/selfprivacy_api/graphql/queries/users.py +++ b/selfprivacy_api/graphql/queries/users.py @@ -5,27 +5,12 @@ import strawberry from selfprivacy_api.graphql.common_types.user import ( User, - ensure_ssh_and_users_fields_exist, get_user_by_username, + get_users, ) -from selfprivacy_api.utils import ReadUserData from selfprivacy_api.graphql import IsAuthenticated -def get_users() -> typing.List[User]: - """Get users""" - user_list = [] - with ReadUserData() as data: - ensure_ssh_and_users_fields_exist(data) - - for user in data["users"]: - user_list.append(get_user_by_username(user["username"])) - - user_list.append(get_user_by_username(data["username"])) - - return user_list - - @strawberry.type class Users: @strawberry.field(permission_classes=[IsAuthenticated]) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py index c2d6a10..dff9304 100644 --- a/selfprivacy_api/graphql/schema.py +++ b/selfprivacy_api/graphql/schema.py @@ -1,19 +1,27 @@ """GraphQL API for SelfPrivacy.""" # pylint: disable=too-few-public-methods +import asyncio +from typing import AsyncGenerator import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations +from selfprivacy_api.graphql.mutations.job_mutations import JobMutations +from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn 
+from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations -from selfprivacy_api.graphql.mutations.storage_mutation import StorageMutations +from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations from selfprivacy_api.graphql.queries.api_queries import Api +from selfprivacy_api.graphql.queries.jobs import Job +from selfprivacy_api.graphql.queries.services import Services from selfprivacy_api.graphql.queries.storage import Storage from selfprivacy_api.graphql.queries.system import System from selfprivacy_api.graphql.mutations.users_mutations import UserMutations from selfprivacy_api.graphql.queries.users import Users +from selfprivacy_api.jobs.test import test_job @strawberry.type @@ -40,6 +48,16 @@ class Query: """Storage queries""" return Storage() + @strawberry.field(permission_classes=[IsAuthenticated]) + def jobs(self) -> Job: + """Jobs queries""" + return Job() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def services(self) -> Services: + """Services queries""" + return Services() + @strawberry.type class Mutation( @@ -48,10 +66,33 @@ class Mutation( UserMutations, SshMutations, StorageMutations, + ServicesMutations, + JobMutations, ): """Root schema for mutations""" + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def test_mutation(self) -> GenericMutationReturn: + """Test mutation""" + test_job() + return GenericMutationReturn( + success=True, + message="Test mutation", + code=200, + ) + pass -schema = strawberry.Schema(query=Query, mutation=Mutation) +@strawberry.type +class Subscription: + """Root schema for subscriptions""" + + @strawberry.subscription(permission_classes=[IsAuthenticated]) + async def count(self, target: int = 100) -> AsyncGenerator[int, None]: + for i in range(target): + yield i + await asyncio.sleep(0.5) + + 
+schema = strawberry.Schema(query=Query, mutation=Mutation, subscription=Subscription) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index a467583..09ac254 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -16,12 +16,18 @@ A job is a dictionary with the following keys: """ import typing import datetime +from uuid import UUID +import asyncio import json import os import time import uuid from enum import Enum +from pydantic import BaseModel + +from selfprivacy_api.utils import ReadUserData, UserDataFiles, WriteUserData + class JobStatus(Enum): """ @@ -34,65 +40,23 @@ class JobStatus(Enum): ERROR = "ERROR" -class Job: +class Job(BaseModel): """ Job class. """ - def __init__( - self, - name: str, - description: str, - status: JobStatus, - created_at: datetime.datetime, - updated_at: datetime.datetime, - finished_at: typing.Optional[datetime.datetime], - error: typing.Optional[str], - result: typing.Optional[str], - ): - self.id = str(uuid.uuid4()) - self.name = name - self.description = description - self.status = status - self.created_at = created_at - self.updated_at = updated_at - self.finished_at = finished_at - self.error = error - self.result = result - - def to_dict(self) -> dict: - """ - Convert the job to a dictionary. - """ - return { - "id": self.id, - "name": self.name, - "description": self.description, - "status": self.status, - "created_at": self.created_at, - "updated_at": self.updated_at, - "finished_at": self.finished_at, - "error": self.error, - "result": self.result, - } - - def to_json(self) -> str: - """ - Convert the job to a JSON string. - """ - return json.dumps(self.to_dict()) - - def __str__(self) -> str: - """ - Convert the job to a string. - """ - return self.to_json() - - def __repr__(self) -> str: - """ - Convert the job to a string. 
- """ - return self.to_json() + uid: UUID = uuid.uuid4() + type_id: str + name: str + description: str + status: JobStatus + status_text: typing.Optional[str] + progress: typing.Optional[int] + created_at: datetime.datetime + updated_at: datetime.datetime + finished_at: typing.Optional[datetime.datetime] + error: typing.Optional[str] + result: typing.Optional[str] class Jobs: @@ -109,6 +73,9 @@ class Jobs: """ if Jobs.__instance is None: Jobs() + if Jobs.__instance is None: + raise Exception("Couldn't init Jobs singleton!") + return Jobs.__instance return Jobs.__instance def __init__(self): @@ -119,41 +86,78 @@ class Jobs: raise Exception("This class is a singleton!") else: Jobs.__instance = self - self.jobs = [] + @staticmethod + def reset() -> None: + """ + Reset the jobs list. + """ + with WriteUserData(UserDataFiles.JOBS) as user_data: + user_data["jobs"] = [] + + @staticmethod def add( - self, name: str, description: str, status: JobStatus = JobStatus.CREATED + name: str, + type_id: str, + description: str, + status: JobStatus = JobStatus.CREATED, + status_text: str = "", + progress: int = 0, ) -> Job: """ Add a job to the jobs list. """ job = Job( name=name, + type_id=type_id, description=description, status=status, + status_text=status_text, + progress=progress, created_at=datetime.datetime.now(), updated_at=datetime.datetime.now(), finished_at=None, error=None, result=None, ) - self.jobs.append(job) + with WriteUserData(UserDataFiles.JOBS) as user_data: + try: + if "jobs" not in user_data: + user_data["jobs"] = [] + user_data["jobs"].append(json.loads(job.json())) + except json.decoder.JSONDecodeError: + user_data["jobs"] = [json.loads(job.json())] return job def remove(self, job: Job) -> None: """ Remove a job from the jobs list. """ - self.jobs.remove(job) + self.remove_by_uuid(str(job.uid)) + def remove_by_uuid(self, job_uuid: str) -> bool: + """ + Remove a job from the jobs list. 
+ """ + with WriteUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for i, j in enumerate(user_data["jobs"]): + if j["uid"] == job_uuid: + del user_data["jobs"][i] + return True + return False + + @staticmethod def update( - self, job: Job, - name: typing.Optional[str], - description: typing.Optional[str], status: JobStatus, - error: typing.Optional[str], - result: typing.Optional[str], + status_text: typing.Optional[str] = None, + progress: typing.Optional[int] = None, + name: typing.Optional[str] = None, + description: typing.Optional[str] = None, + error: typing.Optional[str] = None, + result: typing.Optional[str] = None, ) -> Job: """ Update a job in the jobs list. @@ -162,23 +166,62 @@ class Jobs: job.name = name if description is not None: job.description = description + if status_text is not None: + job.status_text = status_text + if progress is not None: + job.progress = progress job.status = status job.updated_at = datetime.datetime.now() job.error = error job.result = result + if status in (JobStatus.FINISHED, JobStatus.ERROR): + job.finished_at = datetime.datetime.now() + + with WriteUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for i, j in enumerate(user_data["jobs"]): + if j["uid"] == str(job.uid): + user_data["jobs"][i] = json.loads(job.json()) + break + return job - def get_job(self, id: str) -> typing.Optional[Job]: + @staticmethod + def get_job(uid: str) -> typing.Optional[Job]: """ Get a job from the jobs list. """ - for job in self.jobs: - if job.id == id: - return job + with ReadUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for job in user_data["jobs"]: + if job["uid"] == uid: + return Job(**job) return None - def get_jobs(self) -> list: + @staticmethod + def get_jobs() -> typing.List[Job]: """ Get the jobs list. 
""" - return self.jobs + with ReadUserData(UserDataFiles.JOBS) as user_data: + try: + if "jobs" not in user_data: + user_data["jobs"] = [] + return [Job(**job) for job in user_data["jobs"]] + except json.decoder.JSONDecodeError: + return [] + + @staticmethod + def is_busy() -> bool: + """ + Check if there is a job running. + """ + with ReadUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for job in user_data["jobs"]: + if job["status"] == JobStatus.RUNNING.value: + return True + return False diff --git a/selfprivacy_api/jobs/migrate_to_binds.py b/selfprivacy_api/jobs/migrate_to_binds.py new file mode 100644 index 0000000..346023d --- /dev/null +++ b/selfprivacy_api/jobs/migrate_to_binds.py @@ -0,0 +1,291 @@ +"""Function to perform migration of app data to binds.""" +import subprocess +import pathlib +import shutil + +from pydantic import BaseModel +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.mailserver import MailServer +from selfprivacy_api.services.nextcloud import Nextcloud +from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.utils.block_devices import BlockDevices + + +class BindMigrationConfig(BaseModel): + """Config for bind migration. + For each service provide block device name. 
+ """ + + email_block_device: str + bitwarden_block_device: str + gitea_block_device: str + nextcloud_block_device: str + pleroma_block_device: str + + +def is_bind_migrated() -> bool: + """Check if bind migration was performed.""" + with ReadUserData() as user_data: + return user_data.get("useBinds", False) + + +def activate_binds(config: BindMigrationConfig): + """Activate binds.""" + # Activate binds in userdata + with WriteUserData() as user_data: + if "email" not in user_data: + user_data["email"] = {} + user_data["email"]["location"] = config.email_block_device + if "bitwarden" not in user_data: + user_data["bitwarden"] = {} + user_data["bitwarden"]["location"] = config.bitwarden_block_device + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["location"] = config.gitea_block_device + if "nextcloud" not in user_data: + user_data["nextcloud"] = {} + user_data["nextcloud"]["location"] = config.nextcloud_block_device + if "pleroma" not in user_data: + user_data["pleroma"] = {} + user_data["pleroma"]["location"] = config.pleroma_block_device + + user_data["useBinds"] = True + + +def move_folder( + data_path: pathlib.Path, bind_path: pathlib.Path, user: str, group: str +): + """Move folder from data to bind.""" + if data_path.exists(): + shutil.move(str(data_path), str(bind_path)) + else: + return + + data_path.mkdir(mode=0o750, parents=True, exist_ok=True) + + shutil.chown(str(bind_path), user=user, group=group) + shutil.chown(str(data_path), user=user, group=group) + + subprocess.run(["mount", "--bind", str(bind_path), str(data_path)], check=True) + + subprocess.run(["chown", "-R", f"{user}:{group}", str(data_path)], check=True) + + +@huey.task() +def migrate_to_binds(config: BindMigrationConfig, job: Job): + """Migrate app data to binds.""" + + # Exit if migration is already done + if is_bind_migrated(): + Jobs.update( + job=job, + status=JobStatus.ERROR, + error="Migration already done.", + ) + return + + Jobs.update( + job=job, + 
status=JobStatus.RUNNING, + progress=0, + status_text="Checking if all volumes are available.", + ) + # Get block devices. + block_devices = BlockDevices().get_block_devices() + block_device_names = [device.name for device in block_devices] + + # Get all unique required block devices + required_block_devices = [] + for block_device_name in config.__dict__.values(): + if block_device_name not in required_block_devices: + required_block_devices.append(block_device_name) + + # Check if all block devices from config are present. + for block_device_name in required_block_devices: + if block_device_name not in block_device_names: + Jobs.update( + job=job, + status=JobStatus.ERROR, + error=f"Block device {block_device_name} not found.", + ) + return + + # Make sure all required block devices are mounted. + # sda1 is the root partition and is always mounted. + for block_device_name in required_block_devices: + if block_device_name == "sda1": + continue + block_device = BlockDevices().get_block_device(block_device_name) + if block_device is None: + Jobs.update( + job=job, + status=JobStatus.ERROR, + error=f"Block device {block_device_name} not found.", + ) + return + if f"/volumes/{block_device_name}" not in block_device.mountpoints: + Jobs.update( + job=job, + status=JobStatus.ERROR, + error=f"Block device {block_device_name} not mounted.", + ) + return + + # Make sure /volumes/sda1 exists. + pathlib.Path("/volumes/sda1").mkdir(parents=True, exist_ok=True) + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=5, + status_text="Activating binds in NixOS config.", + ) + + activate_binds(config) + + # Perform migration of Nextcloud. 
+ Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=10, + status_text="Migrating Nextcloud.", + ) + + Nextcloud().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/nextcloud"), + bind_path=pathlib.Path(f"/volumes/{config.nextcloud_block_device}/nextcloud"), + user="nextcloud", + group="nextcloud", + ) + + # Start Nextcloud + Nextcloud().start() + + # Perform migration of Bitwarden + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=28, + status_text="Migrating Bitwarden.", + ) + + Bitwarden().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/bitwarden"), + bind_path=pathlib.Path(f"/volumes/{config.bitwarden_block_device}/bitwarden"), + user="vaultwarden", + group="vaultwarden", + ) + + move_folder( + data_path=pathlib.Path("/var/lib/bitwarden_rs"), + bind_path=pathlib.Path( + f"/volumes/{config.bitwarden_block_device}/bitwarden_rs" + ), + user="vaultwarden", + group="vaultwarden", + ) + + # Start Bitwarden + Bitwarden().start() + + # Perform migration of Gitea + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=46, + status_text="Migrating Gitea.", + ) + + Gitea().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/gitea"), + bind_path=pathlib.Path(f"/volumes/{config.gitea_block_device}/gitea"), + user="gitea", + group="gitea", + ) + + Gitea().start() + + # Perform migration of Mail server + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=64, + status_text="Migrating Mail server.", + ) + + MailServer().stop() + + move_folder( + data_path=pathlib.Path("/var/vmail"), + bind_path=pathlib.Path(f"/volumes/{config.email_block_device}/vmail"), + user="virtualMail", + group="virtualMail", + ) + + move_folder( + data_path=pathlib.Path("/var/sieve"), + bind_path=pathlib.Path(f"/volumes/{config.email_block_device}/sieve"), + user="virtualMail", + group="virtualMail", + ) + + MailServer().start() + + # Perform migration of Pleroma + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + 
progress=82, + status_text="Migrating Pleroma.", + ) + + Pleroma().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/pleroma"), + bind_path=pathlib.Path(f"/volumes/{config.pleroma_block_device}/pleroma"), + user="pleroma", + group="pleroma", + ) + + move_folder( + data_path=pathlib.Path("/var/lib/postgresql"), + bind_path=pathlib.Path(f"/volumes/{config.pleroma_block_device}/postgresql"), + user="postgres", + group="postgres", + ) + + Pleroma().start() + + Jobs.update( + job=job, + status=JobStatus.FINISHED, + progress=100, + status_text="Migration finished.", + result="Migration finished.", + ) + + +def start_bind_migration(config: BindMigrationConfig) -> Job: + """Start migration.""" + job = Jobs.add( + type_id="migrations.migrate_to_binds", + name="Migrate to binds", + description="Migration required to use the new disk space management.", + ) + migrate_to_binds(config, job) + return job diff --git a/selfprivacy_api/jobs/test.py b/selfprivacy_api/jobs/test.py new file mode 100644 index 0000000..9d93fb7 --- /dev/null +++ b/selfprivacy_api/jobs/test.py @@ -0,0 +1,57 @@ +import time +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.jobs import JobStatus, Jobs + + +@huey.task() +def test_job(): + job = Jobs.get_instance().add( + type_id="test", + name="Test job", + description="This is a test job.", + status=JobStatus.CREATED, + status_text="", + progress=0, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=5, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=10, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=15, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + 
progress=20, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=25, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.FINISHED, + status_text="Job finished.", + progress=100, + ) diff --git a/selfprivacy_api/resources/api_auth/__init__.py b/selfprivacy_api/resources/api_auth/__init__.py deleted file mode 100644 index 9bd1703..0000000 --- a/selfprivacy_api/resources/api_auth/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python3 -"""API authentication module""" - -from flask import Blueprint -from flask_restful import Api - -auth = Blueprint("auth", __name__, url_prefix="/auth") -api = Api(auth) - -from . import ( - new_device, - recovery_token, - app_tokens, -) diff --git a/selfprivacy_api/resources/api_auth/app_tokens.py b/selfprivacy_api/resources/api_auth/app_tokens.py deleted file mode 100644 index 940c60a..0000000 --- a/selfprivacy_api/resources/api_auth/app_tokens.py +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env python3 -"""App tokens management module""" -from flask import request -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.api_auth import api -from selfprivacy_api.utils.auth import ( - delete_token, - get_tokens_info, - is_token_name_exists, - is_token_name_pair_valid, - refresh_token, - get_token_name, -) - - -class Tokens(Resource): - """Token management class - GET returns the list of active devices. - DELETE invalidates token unless it is the last one or the caller uses this token. - POST refreshes the token of the caller. 
- """ - - def get(self): - """ - Get current device tokens - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: List of tokens - 400: - description: Bad request - """ - caller_name = get_token_name(request.headers.get("Authorization").split(" ")[1]) - tokens = get_tokens_info() - # Retrun a list of tokens and if it is the caller's token - # it will be marked with a flag - return [ - { - "name": token["name"], - "date": token["date"], - "is_caller": token["name"] == caller_name, - } - for token in tokens - ] - - def delete(self): - """ - Delete token - --- - tags: - - Tokens - security: - - bearerAuth: [] - parameters: - - in: body - name: token - required: true - description: Token's name to delete - schema: - type: object - properties: - token_name: - type: string - description: Token name to delete - required: true - responses: - 200: - description: Token deleted - 400: - description: Bad request - 404: - description: Token not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "token_name", type=str, required=True, help="Token to delete" - ) - args = parser.parse_args() - token_name = args["token_name"] - if is_token_name_pair_valid( - token_name, request.headers.get("Authorization").split(" ")[1] - ): - return {"message": "Cannot delete caller's token"}, 400 - if not is_token_name_exists(token_name): - return {"message": "Token not found"}, 404 - delete_token(token_name) - return {"message": "Token deleted"}, 200 - - def post(self): - """ - Refresh token - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: Token refreshed - 400: - description: Bad request - 404: - description: Token not found - """ - # Get token from header - token = request.headers.get("Authorization").split(" ")[1] - new_token = refresh_token(token) - if new_token is None: - return {"message": "Token not found"}, 404 - return {"token": new_token}, 200 - - -api.add_resource(Tokens, "/tokens") diff 
--git a/selfprivacy_api/resources/api_auth/new_device.py b/selfprivacy_api/resources/api_auth/new_device.py deleted file mode 100644 index 2c0bde1..0000000 --- a/selfprivacy_api/resources/api_auth/new_device.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python3 -"""New device auth module""" -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.api_auth import api -from selfprivacy_api.utils.auth import ( - get_new_device_auth_token, - use_new_device_auth_token, - delete_new_device_auth_token, -) - - -class NewDevice(Resource): - """New device auth class - POST returns a new token for the caller. - """ - - def post(self): - """ - Get new device token - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: New device token - 400: - description: Bad request - """ - token = get_new_device_auth_token() - return {"token": token} - - def delete(self): - """ - Delete new device token - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: New device token deleted - 400: - description: Bad request - """ - delete_new_device_auth_token() - return {"token": None} - - -class AuthorizeDevice(Resource): - """Authorize device class - POST authorizes the caller. 
- """ - - def post(self): - """ - Authorize device - --- - tags: - - Tokens - parameters: - - in: body - name: data - required: true - description: Who is authorizing - schema: - type: object - properties: - token: - type: string - description: Mnemonic token to authorize - device: - type: string - description: Device to authorize - responses: - 200: - description: Device authorized - 400: - description: Bad request - 404: - description: Token not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "token", type=str, required=True, help="Mnemonic token to authorize" - ) - parser.add_argument( - "device", type=str, required=True, help="Device to authorize" - ) - args = parser.parse_args() - auth_token = args["token"] - device = args["device"] - token = use_new_device_auth_token(auth_token, device) - if token is None: - return {"message": "Token not found"}, 404 - return {"message": "Device authorized", "token": token}, 200 - - -api.add_resource(NewDevice, "/new_device") -api.add_resource(AuthorizeDevice, "/new_device/authorize") diff --git a/selfprivacy_api/resources/api_auth/recovery_token.py b/selfprivacy_api/resources/api_auth/recovery_token.py deleted file mode 100644 index 912a50b..0000000 --- a/selfprivacy_api/resources/api_auth/recovery_token.py +++ /dev/null @@ -1,205 +0,0 @@ -#!/usr/bin/env python3 -"""Recovery token module""" -from datetime import datetime -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.api_auth import api -from selfprivacy_api.utils import parse_date -from selfprivacy_api.utils.auth import ( - is_recovery_token_exists, - is_recovery_token_valid, - get_recovery_token_status, - generate_recovery_token, - use_mnemonic_recoverery_token, -) - - -class RecoveryToken(Resource): - """Recovery token class - GET returns the status of the recovery token. - POST generates a new recovery token. 
- """ - - def get(self): - """ - Get recovery token status - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: Recovery token status - schema: - type: object - properties: - exists: - type: boolean - description: Recovery token exists - valid: - type: boolean - description: Recovery token is valid - date: - type: string - description: Recovery token date - expiration: - type: string - description: Recovery token expiration date - uses_left: - type: integer - description: Recovery token uses left - 400: - description: Bad request - """ - if not is_recovery_token_exists(): - return { - "exists": False, - "valid": False, - "date": None, - "expiration": None, - "uses_left": None, - } - status = get_recovery_token_status() - # check if status is None - if status is None: - return { - "exists": False, - "valid": False, - "date": None, - "expiration": None, - "uses_left": None, - } - - if not is_recovery_token_valid(): - return { - "exists": True, - "valid": False, - "date": status["date"], - "expiration": status["expiration"], - "uses_left": status["uses_left"], - } - return { - "exists": True, - "valid": True, - "date": status["date"], - "expiration": status["expiration"], - "uses_left": status["uses_left"], - } - - def post(self): - """ - Generate recovery token - --- - tags: - - Tokens - security: - - bearerAuth: [] - parameters: - - in: body - name: data - required: true - description: Token data - schema: - type: object - properties: - expiration: - type: string - description: Token expiration date - uses: - type: integer - description: Token uses - responses: - 200: - description: Recovery token generated - schema: - type: object - properties: - token: - type: string - description: Mnemonic recovery token - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument( - "expiration", type=str, required=False, help="Token expiration date" - ) - parser.add_argument("uses", type=int, 
required=False, help="Token uses") - args = parser.parse_args() - # Convert expiration date to datetime and return 400 if it is not valid - if args["expiration"]: - try: - expiration = parse_date(args["expiration"]) - # Retrun 400 if expiration date is in the past - if expiration < datetime.now(): - return {"message": "Expiration date cannot be in the past"}, 400 - except ValueError: - return { - "error": "Invalid expiration date. Use YYYY-MM-DDTHH:MM:SS.SSS" - }, 400 - else: - expiration = None - if args["uses"] is not None and args["uses"] < 1: - return {"message": "Uses must be greater than 0"}, 400 - # Generate recovery token - token = generate_recovery_token(expiration, args["uses"]) - return {"token": token} - - -class UseRecoveryToken(Resource): - """Use recovery token class - POST uses the recovery token. - """ - - def post(self): - """ - Use recovery token - --- - tags: - - Tokens - parameters: - - in: body - name: data - required: true - description: Token data - schema: - type: object - properties: - token: - type: string - description: Mnemonic recovery token - device: - type: string - description: Device to authorize - responses: - 200: - description: Recovery token used - schema: - type: object - properties: - token: - type: string - description: Device authorization token - 400: - description: Bad request - 404: - description: Token not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "token", type=str, required=True, help="Mnemonic recovery token" - ) - parser.add_argument( - "device", type=str, required=True, help="Device to authorize" - ) - args = parser.parse_args() - # Use recovery token - token = use_mnemonic_recoverery_token(args["token"], args["device"]) - if token is None: - return {"error": "Token not found"}, 404 - return {"token": token} - - -api.add_resource(RecoveryToken, "/recovery_token") -api.add_resource(UseRecoveryToken, "/recovery_token/use") diff --git a/selfprivacy_api/resources/common.py 
b/selfprivacy_api/resources/common.py deleted file mode 100644 index f78aad6..0000000 --- a/selfprivacy_api/resources/common.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python3 -"""Unassigned views""" -from flask_restful import Resource -from selfprivacy_api.graphql.queries.api_queries import get_api_version - - -class ApiVersion(Resource): - """SelfPrivacy API version""" - - def get(self): - """Get API version - --- - tags: - - System - responses: - 200: - description: API version - schema: - type: object - properties: - version: - type: string - description: API version - 401: - description: Unauthorized - """ - return {"version": get_api_version()} diff --git a/selfprivacy_api/resources/services/__init__.py b/selfprivacy_api/resources/services/__init__.py deleted file mode 100644 index a7f1dbe..0000000 --- a/selfprivacy_api/resources/services/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -"""Services management module""" -from flask import Blueprint -from flask_restful import Api - -services = Blueprint("services", __name__, url_prefix="/services") -api = Api(services) - -from . 
import ( - bitwarden, - gitea, - mailserver, - main, - nextcloud, - ocserv, - pleroma, - restic, - ssh, -) diff --git a/selfprivacy_api/resources/services/bitwarden.py b/selfprivacy_api/resources/services/bitwarden.py deleted file mode 100644 index 412ba8a..0000000 --- a/selfprivacy_api/resources/services/bitwarden.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Bitwarden management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableBitwarden(Resource): - """Enable Bitwarden""" - - def post(self): - """ - Enable Bitwarden - --- - tags: - - Bitwarden - security: - - bearerAuth: [] - responses: - 200: - description: Bitwarden enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "bitwarden" not in data: - data["bitwarden"] = {} - data["bitwarden"]["enable"] = True - - return { - "status": 0, - "message": "Bitwarden enabled", - } - - -class DisableBitwarden(Resource): - """Disable Bitwarden""" - - def post(self): - """ - Disable Bitwarden - --- - tags: - - Bitwarden - security: - - bearerAuth: [] - responses: - 200: - description: Bitwarden disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "bitwarden" not in data: - data["bitwarden"] = {} - data["bitwarden"]["enable"] = False - - return { - "status": 0, - "message": "Bitwarden disabled", - } - - -api.add_resource(EnableBitwarden, "/bitwarden/enable") -api.add_resource(DisableBitwarden, "/bitwarden/disable") diff --git a/selfprivacy_api/resources/services/gitea.py b/selfprivacy_api/resources/services/gitea.py deleted file mode 100644 index bd4b8de..0000000 --- a/selfprivacy_api/resources/services/gitea.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Gitea management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import 
WriteUserData - - -class EnableGitea(Resource): - """Enable Gitea""" - - def post(self): - """ - Enable Gitea - --- - tags: - - Gitea - security: - - bearerAuth: [] - responses: - 200: - description: Gitea enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "gitea" not in data: - data["gitea"] = {} - data["gitea"]["enable"] = True - - return { - "status": 0, - "message": "Gitea enabled", - } - - -class DisableGitea(Resource): - """Disable Gitea""" - - def post(self): - """ - Disable Gitea - --- - tags: - - Gitea - security: - - bearerAuth: [] - responses: - 200: - description: Gitea disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "gitea" not in data: - data["gitea"] = {} - data["gitea"]["enable"] = False - - return { - "status": 0, - "message": "Gitea disabled", - } - - -api.add_resource(EnableGitea, "/gitea/enable") -api.add_resource(DisableGitea, "/gitea/disable") diff --git a/selfprivacy_api/resources/services/mailserver.py b/selfprivacy_api/resources/services/mailserver.py deleted file mode 100644 index 01fa574..0000000 --- a/selfprivacy_api/resources/services/mailserver.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python3 -"""Mail server management module""" -import base64 -import subprocess -import os -from flask_restful import Resource - -from selfprivacy_api.resources.services import api - -from selfprivacy_api.utils import get_dkim_key, get_domain - - -class DKIMKey(Resource): - """Get DKIM key from file""" - - def get(self): - """ - Get DKIM key from file - --- - tags: - - Email - security: - - bearerAuth: [] - responses: - 200: - description: DKIM key encoded in base64 - 401: - description: Unauthorized - 404: - description: DKIM key not found - """ - domain = get_domain() - - dkim = get_dkim_key(domain) - if dkim is None: - return "DKIM file not found", 404 - dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8") - return dkim - - -api.add_resource(DKIMKey, 
"/mailserver/dkim") diff --git a/selfprivacy_api/resources/services/main.py b/selfprivacy_api/resources/services/main.py deleted file mode 100644 index 8b6743c..0000000 --- a/selfprivacy_api/resources/services/main.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python3 -"""Services status module""" -import subprocess -from flask_restful import Resource - -from . import api - - -class ServiceStatus(Resource): - """Get service status""" - - def get(self): - """ - Get service status - --- - tags: - - Services - responses: - 200: - description: Service status - schema: - type: object - properties: - imap: - type: integer - description: Dovecot service status - smtp: - type: integer - description: Postfix service status - http: - type: integer - description: Nginx service status - bitwarden: - type: integer - description: Bitwarden service status - gitea: - type: integer - description: Gitea service status - nextcloud: - type: integer - description: Nextcloud service status - ocserv: - type: integer - description: OpenConnect VPN service status - pleroma: - type: integer - description: Pleroma service status - 401: - description: Unauthorized - """ - imap_service = subprocess.Popen(["systemctl", "status", "dovecot2.service"]) - imap_service.communicate()[0] - smtp_service = subprocess.Popen(["systemctl", "status", "postfix.service"]) - smtp_service.communicate()[0] - http_service = subprocess.Popen(["systemctl", "status", "nginx.service"]) - http_service.communicate()[0] - bitwarden_service = subprocess.Popen( - ["systemctl", "status", "vaultwarden.service"] - ) - bitwarden_service.communicate()[0] - gitea_service = subprocess.Popen(["systemctl", "status", "gitea.service"]) - gitea_service.communicate()[0] - nextcloud_service = subprocess.Popen( - ["systemctl", "status", "phpfpm-nextcloud.service"] - ) - nextcloud_service.communicate()[0] - ocserv_service = subprocess.Popen(["systemctl", "status", "ocserv.service"]) - ocserv_service.communicate()[0] - pleroma_service 
= subprocess.Popen(["systemctl", "status", "pleroma.service"]) - pleroma_service.communicate()[0] - - return { - "imap": imap_service.returncode, - "smtp": smtp_service.returncode, - "http": http_service.returncode, - "bitwarden": bitwarden_service.returncode, - "gitea": gitea_service.returncode, - "nextcloud": nextcloud_service.returncode, - "ocserv": ocserv_service.returncode, - "pleroma": pleroma_service.returncode, - } - - -api.add_resource(ServiceStatus, "/status") diff --git a/selfprivacy_api/resources/services/nextcloud.py b/selfprivacy_api/resources/services/nextcloud.py deleted file mode 100644 index 3aa9d06..0000000 --- a/selfprivacy_api/resources/services/nextcloud.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Nextcloud management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableNextcloud(Resource): - """Enable Nextcloud""" - - def post(self): - """ - Enable Nextcloud - --- - tags: - - Nextcloud - security: - - bearerAuth: [] - responses: - 200: - description: Nextcloud enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "nextcloud" not in data: - data["nextcloud"] = {} - data["nextcloud"]["enable"] = True - - return { - "status": 0, - "message": "Nextcloud enabled", - } - - -class DisableNextcloud(Resource): - """Disable Nextcloud""" - - def post(self): - """ - Disable Nextcloud - --- - tags: - - Nextcloud - security: - - bearerAuth: [] - responses: - 200: - description: Nextcloud disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "nextcloud" not in data: - data["nextcloud"] = {} - data["nextcloud"]["enable"] = False - - return { - "status": 0, - "message": "Nextcloud disabled", - } - - -api.add_resource(EnableNextcloud, "/nextcloud/enable") -api.add_resource(DisableNextcloud, "/nextcloud/disable") diff --git 
a/selfprivacy_api/resources/services/ocserv.py b/selfprivacy_api/resources/services/ocserv.py deleted file mode 100644 index 4dc83da..0000000 --- a/selfprivacy_api/resources/services/ocserv.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""OpenConnect VPN server management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableOcserv(Resource): - """Enable OpenConnect VPN server""" - - def post(self): - """ - Enable OCserv - --- - tags: - - OCserv - security: - - bearerAuth: [] - responses: - 200: - description: OCserv enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "ocserv" not in data: - data["ocserv"] = {} - data["ocserv"]["enable"] = True - - return { - "status": 0, - "message": "OpenConnect VPN server enabled", - } - - -class DisableOcserv(Resource): - """Disable OpenConnect VPN server""" - - def post(self): - """ - Disable OCserv - --- - tags: - - OCserv - security: - - bearerAuth: [] - responses: - 200: - description: OCserv disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "ocserv" not in data: - data["ocserv"] = {} - data["ocserv"]["enable"] = False - - return { - "status": 0, - "message": "OpenConnect VPN server disabled", - } - - -api.add_resource(EnableOcserv, "/ocserv/enable") -api.add_resource(DisableOcserv, "/ocserv/disable") diff --git a/selfprivacy_api/resources/services/pleroma.py b/selfprivacy_api/resources/services/pleroma.py deleted file mode 100644 index aaf08f0..0000000 --- a/selfprivacy_api/resources/services/pleroma.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Pleroma management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnablePleroma(Resource): - """Enable Pleroma""" - - def post(self): - """ - Enable Pleroma - 
--- - tags: - - Pleroma - security: - - bearerAuth: [] - responses: - 200: - description: Pleroma enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "pleroma" not in data: - data["pleroma"] = {} - data["pleroma"]["enable"] = True - - return { - "status": 0, - "message": "Pleroma enabled", - } - - -class DisablePleroma(Resource): - """Disable Pleroma""" - - def post(self): - """ - Disable Pleroma - --- - tags: - - Pleroma - security: - - bearerAuth: [] - responses: - 200: - description: Pleroma disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "pleroma" not in data: - data["pleroma"] = {} - data["pleroma"]["enable"] = False - - return { - "status": 0, - "message": "Pleroma disabled", - } - - -api.add_resource(EnablePleroma, "/pleroma/enable") -api.add_resource(DisablePleroma, "/pleroma/disable") diff --git a/selfprivacy_api/resources/services/restic.py b/selfprivacy_api/resources/services/restic.py deleted file mode 100644 index dd22c9a..0000000 --- a/selfprivacy_api/resources/services/restic.py +++ /dev/null @@ -1,241 +0,0 @@ -#!/usr/bin/env python3 -"""Backups management module""" -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData -from selfprivacy_api.restic_controller import tasks as restic_tasks -from selfprivacy_api.restic_controller import ResticController, ResticStates - - -class ListAllBackups(Resource): - """List all restic backups""" - - def get(self): - """ - Get all restic backups - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: A list of snapshots - 400: - description: Bad request - 401: - description: Unauthorized - """ - - restic = ResticController() - return restic.snapshot_list - - -class AsyncCreateBackup(Resource): - """Create a new restic backup""" - - def put(self): - """ - Initiate a new restic backup - --- - tags: - - Backups - 
security: - - bearerAuth: [] - responses: - 200: - description: Backup creation has started - 400: - description: Bad request - 401: - description: Unauthorized - 409: - description: Backup already in progress - """ - restic = ResticController() - if restic.state is ResticStates.NO_KEY: - return {"error": "No key provided"}, 400 - if restic.state is ResticStates.INITIALIZING: - return {"error": "Backup is initializing"}, 400 - if restic.state is ResticStates.BACKING_UP: - return {"error": "Backup is already running"}, 409 - restic_tasks.start_backup() - return { - "status": 0, - "message": "Backup creation has started", - } - - -class CheckBackupStatus(Resource): - """Check current backup status""" - - def get(self): - """ - Get backup status - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: Backup status - 400: - description: Bad request - 401: - description: Unauthorized - """ - restic = ResticController() - - return { - "status": restic.state.name, - "progress": restic.progress, - "error_message": restic.error_message, - } - - -class ForceReloadSnapshots(Resource): - """Force reload snapshots""" - - def get(self): - """ - Force reload snapshots - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: Snapshots reloaded - 400: - description: Bad request - 401: - description: Unauthorized - """ - restic_tasks.load_snapshots() - return { - "status": 0, - "message": "Snapshots reload started", - } - - -class AsyncRestoreBackup(Resource): - """Trigger backup restoration process""" - - def put(self): - """ - Start backup restoration - --- - tags: - - Backups - security: - - bearerAuth: [] - parameters: - - in: body - required: true - name: backup - description: Backup to restore - schema: - type: object - required: - - backupId - properties: - backupId: - type: string - responses: - 200: - description: Backup restoration process started - 400: - description: Bad request - 401: - description: 
Unauthorized - """ - parser = reqparse.RequestParser() - parser.add_argument("backupId", type=str, required=True) - args = parser.parse_args() - - restic = ResticController() - if restic.state is ResticStates.NO_KEY: - return {"error": "No key provided"}, 400 - if restic.state is ResticStates.NOT_INITIALIZED: - return {"error": "Repository is not initialized"}, 400 - if restic.state is ResticStates.BACKING_UP: - return {"error": "Backup is already running"}, 409 - if restic.state is ResticStates.INITIALIZING: - return {"error": "Repository is initializing"}, 400 - if restic.state is ResticStates.RESTORING: - return {"error": "Restore is already running"}, 409 - for backup in restic.snapshot_list: - if backup["short_id"] == args["backupId"]: - restic_tasks.restore_from_backup(args["backupId"]) - return { - "status": 0, - "message": "Backup restoration procedure started", - } - - return {"error": "Backup not found"}, 404 - - -class BackblazeConfig(Resource): - """Backblaze config""" - - def put(self): - """ - Set the new key for backblaze - --- - tags: - - Backups - security: - - bearerAuth: [] - parameters: - - in: body - required: true - name: backblazeSettings - description: New Backblaze settings - schema: - type: object - required: - - accountId - - accountKey - - bucket - properties: - accountId: - type: string - accountKey: - type: string - bucket: - type: string - responses: - 200: - description: New Backblaze settings - 400: - description: Bad request - 401: - description: Unauthorized - """ - parser = reqparse.RequestParser() - parser.add_argument("accountId", type=str, required=True) - parser.add_argument("accountKey", type=str, required=True) - parser.add_argument("bucket", type=str, required=True) - args = parser.parse_args() - - with WriteUserData() as data: - if "backblaze" not in data: - data["backblaze"] = {} - data["backblaze"]["accountId"] = args["accountId"] - data["backblaze"]["accountKey"] = args["accountKey"] - data["backblaze"]["bucket"] = 
args["bucket"] - - restic_tasks.update_keys_from_userdata() - - return "New Backblaze settings saved" - - -api.add_resource(ListAllBackups, "/restic/backup/list") -api.add_resource(AsyncCreateBackup, "/restic/backup/create") -api.add_resource(CheckBackupStatus, "/restic/backup/status") -api.add_resource(AsyncRestoreBackup, "/restic/backup/restore") -api.add_resource(BackblazeConfig, "/restic/backblaze/config") -api.add_resource(ForceReloadSnapshots, "/restic/backup/reload") diff --git a/selfprivacy_api/resources/services/ssh.py b/selfprivacy_api/resources/services/ssh.py deleted file mode 100644 index 3ea5a1d..0000000 --- a/selfprivacy_api/resources/services/ssh.py +++ /dev/null @@ -1,407 +0,0 @@ -#!/usr/bin/env python3 -"""SSH management module""" -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData, ReadUserData, validate_ssh_public_key - - -class EnableSSH(Resource): - """Enable SSH""" - - def post(self): - """ - Enable SSH - --- - tags: - - SSH - security: - - bearerAuth: [] - responses: - 200: - description: SSH enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - data["ssh"]["enable"] = True - - return { - "status": 0, - "message": "SSH enabled", - } - - -class SSHSettings(Resource): - """Enable/disable SSH""" - - def get(self): - """ - Get current SSH settings - --- - tags: - - SSH - security: - - bearerAuth: [] - responses: - 200: - description: SSH settings - 400: - description: Bad request - """ - with ReadUserData() as data: - if "ssh" not in data: - return {"enable": True, "passwordAuthentication": True} - if "enable" not in data["ssh"]: - data["ssh"]["enable"] = True - if "passwordAuthentication" not in data["ssh"]: - data["ssh"]["passwordAuthentication"] = True - return { - "enable": data["ssh"]["enable"], - "passwordAuthentication": data["ssh"]["passwordAuthentication"], - } - - def 
put(self): - """ - Change SSH settings - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - name: sshSettings - in: body - required: true - description: SSH settings - schema: - type: object - required: - - enable - - passwordAuthentication - properties: - enable: - type: boolean - passwordAuthentication: - type: boolean - responses: - 200: - description: New settings saved - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument("enable", type=bool, required=False) - parser.add_argument("passwordAuthentication", type=bool, required=False) - args = parser.parse_args() - enable = args["enable"] - password_authentication = args["passwordAuthentication"] - - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - if enable is not None: - data["ssh"]["enable"] = enable - if password_authentication is not None: - data["ssh"]["passwordAuthentication"] = password_authentication - - return "SSH settings changed" - - -class WriteSSHKey(Resource): - """Write new SSH key""" - - def put(self): - """ - Add a SSH root key - --- - consumes: - - application/json - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: body - name: body - required: true - description: Public key to add - schema: - type: object - required: - - public_key - properties: - public_key: - type: string - description: ssh-ed25519 public key. - responses: - 201: - description: Key added - 400: - description: Bad request - 401: - description: Unauthorized - 409: - description: Key already exists - """ - parser = reqparse.RequestParser() - parser.add_argument( - "public_key", type=str, required=True, help="Key cannot be blank!" - ) - args = parser.parse_args() - - public_key = args["public_key"] - - if not validate_ssh_public_key(public_key): - return { - "error": "Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported.", - }, 400 - - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - # Return 409 if key already in array - for key in data["ssh"]["rootKeys"]: - if key == public_key: - return { - "error": "Key already exists", - }, 409 - data["ssh"]["rootKeys"].append(public_key) - - return { - "status": 0, - "message": "New SSH key successfully written", - }, 201 - - -class SSHKeys(Resource): - """List SSH keys""" - - def get(self, username): - """ - List SSH keys - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: path - name: username - type: string - required: true - description: User to list keys for - responses: - 200: - description: SSH keys - 401: - description: Unauthorized - """ - with ReadUserData() as data: - if username == "root": - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - return data["ssh"]["rootKeys"] - if username == data["username"]: - if "sshKeys" not in data: - data["sshKeys"] = [] - return data["sshKeys"] - if "users" not in data: - data["users"] = [] - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - return user["sshKeys"] - return { - "error": "User not found", - }, 404 - - def post(self, username): - """ - Add SSH key to the user - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: body - required: true - name: public_key - schema: - type: object - required: - - public_key - properties: - public_key: - type: string - - in: path - name: username - type: string - required: true - description: User to add keys for - responses: - 201: - description: SSH key added - 401: - description: Unauthorized - 404: - description: User not found - 409: - description: Key already exists - """ - parser = reqparse.RequestParser() - parser.add_argument( - "public_key", 
type=str, required=True, help="Key cannot be blank!" - ) - args = parser.parse_args() - - if username == "root": - return { - "error": "Use /ssh/key/send to add root keys", - }, 400 - - if not validate_ssh_public_key(args["public_key"]): - return { - "error": "Invalid key type. Only ssh-ed25519 and ssh-rsa are supported.", - }, 400 - - with WriteUserData() as data: - if username == data["username"]: - if "sshKeys" not in data: - data["sshKeys"] = [] - # Return 409 if key already in array - for key in data["sshKeys"]: - if key == args["public_key"]: - return { - "error": "Key already exists", - }, 409 - data["sshKeys"].append(args["public_key"]) - return { - "message": "New SSH key successfully written", - }, 201 - - if "users" not in data: - data["users"] = [] - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - # Return 409 if key already in array - for key in user["sshKeys"]: - if key == args["public_key"]: - return { - "error": "Key already exists", - }, 409 - user["sshKeys"].append(args["public_key"]) - return { - "message": "New SSH key successfully written", - }, 201 - return { - "error": "User not found", - }, 404 - - def delete(self, username): - """ - Delete SSH key - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: body - name: public_key - required: true - description: Key to delete - schema: - type: object - required: - - public_key - properties: - public_key: - type: string - - in: path - name: username - type: string - required: true - description: User to delete keys for - responses: - 200: - description: SSH key deleted - 401: - description: Unauthorized - 404: - description: Key not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "public_key", type=str, required=True, help="Key cannot be blank!" 
- ) - args = parser.parse_args() - - with WriteUserData() as data: - if username == "root": - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - # Return 404 if key not in array - for key in data["ssh"]["rootKeys"]: - if key == args["public_key"]: - data["ssh"]["rootKeys"].remove(key) - # If rootKeys became zero length, delete it - if len(data["ssh"]["rootKeys"]) == 0: - del data["ssh"]["rootKeys"] - return { - "message": "SSH key deleted", - }, 200 - return { - "error": "Key not found", - }, 404 - if username == data["username"]: - if "sshKeys" not in data: - data["sshKeys"] = [] - # Return 404 if key not in array - for key in data["sshKeys"]: - if key == args["public_key"]: - data["sshKeys"].remove(key) - return { - "message": "SSH key deleted", - }, 200 - return { - "error": "Key not found", - }, 404 - if "users" not in data: - data["users"] = [] - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - # Return 404 if key not in array - for key in user["sshKeys"]: - if key == args["public_key"]: - user["sshKeys"].remove(key) - return { - "message": "SSH key successfully deleted", - }, 200 - return { - "error": "Key not found", - }, 404 - return { - "error": "User not found", - }, 404 - - -api.add_resource(EnableSSH, "/ssh/enable") -api.add_resource(SSHSettings, "/ssh") - -api.add_resource(WriteSSHKey, "/ssh/key/send") -api.add_resource(SSHKeys, "/ssh/keys/") diff --git a/selfprivacy_api/resources/system.py b/selfprivacy_api/resources/system.py deleted file mode 100644 index 958616e..0000000 --- a/selfprivacy_api/resources/system.py +++ /dev/null @@ -1,346 +0,0 @@ -#!/usr/bin/env python3 -"""System management module""" -import os -import subprocess -import pytz -from flask import Blueprint -from flask_restful import Resource, Api, reqparse -from selfprivacy_api.graphql.queries.system import ( - get_python_version, - get_system_version, -) - -from 
selfprivacy_api.utils import WriteUserData, ReadUserData - -api_system = Blueprint("system", __name__, url_prefix="/system") -api = Api(api_system) - - -class Timezone(Resource): - """Change timezone of NixOS""" - - def get(self): - """ - Get current system timezone - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: Timezone - 400: - description: Bad request - """ - with ReadUserData() as data: - if "timezone" not in data: - return "Europe/Uzhgorod" - return data["timezone"] - - def put(self): - """ - Change system timezone - --- - tags: - - System - security: - - bearerAuth: [] - parameters: - - name: timezone - in: body - required: true - description: Timezone to set - schema: - type: object - required: - - timezone - properties: - timezone: - type: string - responses: - 200: - description: Timezone changed - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument("timezone", type=str, required=True) - timezone = parser.parse_args()["timezone"] - - # Check if timezone is a valid tzdata string - if timezone not in pytz.all_timezones: - return {"error": "Invalid timezone"}, 400 - - with WriteUserData() as data: - data["timezone"] = timezone - return "Timezone changed" - - -class AutoUpgrade(Resource): - """Enable/disable automatic upgrades and reboots""" - - def get(self): - """ - Get current system autoupgrade settings - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: Auto-upgrade settings - 400: - description: Bad request - """ - with ReadUserData() as data: - if "autoUpgrade" not in data: - return {"enable": True, "allowReboot": False} - if "enable" not in data["autoUpgrade"]: - data["autoUpgrade"]["enable"] = True - if "allowReboot" not in data["autoUpgrade"]: - data["autoUpgrade"]["allowReboot"] = False - return data["autoUpgrade"] - - def put(self): - """ - Change system auto upgrade settings - --- - tags: - - System - security: - - 
bearerAuth: [] - parameters: - - name: autoUpgrade - in: body - required: true - description: Auto upgrade settings - schema: - type: object - required: - - enable - - allowReboot - properties: - enable: - type: boolean - allowReboot: - type: boolean - responses: - 200: - description: New settings saved - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument("enable", type=bool, required=False) - parser.add_argument("allowReboot", type=bool, required=False) - args = parser.parse_args() - enable = args["enable"] - allow_reboot = args["allowReboot"] - - with WriteUserData() as data: - if "autoUpgrade" not in data: - data["autoUpgrade"] = {} - if enable is not None: - data["autoUpgrade"]["enable"] = enable - if allow_reboot is not None: - data["autoUpgrade"]["allowReboot"] = allow_reboot - return "Auto-upgrade settings changed" - - -class RebuildSystem(Resource): - """Rebuild NixOS""" - - def get(self): - """ - Rebuild NixOS with nixos-rebuild switch - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System rebuild has started - 401: - description: Unauthorized - """ - rebuild_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True - ) - rebuild_result.communicate()[0] - return rebuild_result.returncode - - -class RollbackSystem(Resource): - """Rollback NixOS""" - - def get(self): - """ - Rollback NixOS with nixos-rebuild switch --rollback - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System rollback has started - 401: - description: Unauthorized - """ - rollback_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True - ) - rollback_result.communicate()[0] - return rollback_result.returncode - - -class UpgradeSystem(Resource): - """Upgrade NixOS""" - - def get(self): - """ - Upgrade NixOS with nixos-rebuild switch --upgrade - --- - tags: - - System 
- security: - - bearerAuth: [] - responses: - 200: - description: System upgrade has started - 401: - description: Unauthorized - """ - upgrade_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True - ) - upgrade_result.communicate()[0] - return upgrade_result.returncode - - -class RebootSystem(Resource): - """Reboot the system""" - - def get(self): - """ - Reboot the system - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System reboot has started - 401: - description: Unauthorized - """ - subprocess.Popen(["reboot"], start_new_session=True) - return "System reboot has started" - - -class SystemVersion(Resource): - """Get system version from uname""" - - def get(self): - """ - Get system version from uname -a - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: OK - 401: - description: Unauthorized - """ - return { - "system_version": get_system_version(), - } - - -class PythonVersion(Resource): - """Get python version""" - - def get(self): - """ - Get python version used by this API - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: OK - 401: - description: Unauthorized - """ - return get_python_version() - - -class PullRepositoryChanges(Resource): - """Pull NixOS config repository changes""" - - def get(self): - """ - Pull Repository Changes - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: Got update - 201: - description: Nothing to update - 401: - description: Unauthorized - 500: - description: Something went wrong - """ - - git_pull_command = ["git", "pull"] - - current_working_directory = os.getcwd() - os.chdir("/etc/nixos") - - git_pull_process_descriptor = subprocess.Popen( - git_pull_command, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - shell=False, - ) - - data = git_pull_process_descriptor.communicate()[0].decode("utf-8") - - 
os.chdir(current_working_directory) - - if git_pull_process_descriptor.returncode == 0: - return { - "status": 0, - "message": "Update completed successfully", - "data": data, - } - return { - "status": git_pull_process_descriptor.returncode, - "message": "Something went wrong", - "data": data, - }, 500 - - -api.add_resource(Timezone, "/configuration/timezone") -api.add_resource(AutoUpgrade, "/configuration/autoUpgrade") -api.add_resource(RebuildSystem, "/configuration/apply") -api.add_resource(RollbackSystem, "/configuration/rollback") -api.add_resource(UpgradeSystem, "/configuration/upgrade") -api.add_resource(RebootSystem, "/reboot") -api.add_resource(SystemVersion, "/version") -api.add_resource(PythonVersion, "/pythonVersion") -api.add_resource(PullRepositoryChanges, "/configuration/pull") diff --git a/selfprivacy_api/resources/users.py b/selfprivacy_api/resources/users.py deleted file mode 100644 index e114324..0000000 --- a/selfprivacy_api/resources/users.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/env python3 -"""Users management module""" -import subprocess -import re -from flask_restful import Resource, reqparse - -from selfprivacy_api.utils import WriteUserData, ReadUserData, is_username_forbidden - - -class Users(Resource): - """Users management""" - - def get(self): - """ - Get a list of users - --- - tags: - - Users - security: - - bearerAuth: [] - responses: - 200: - description: A list of users - 401: - description: Unauthorized - """ - parser = reqparse.RequestParser(bundle_errors=True) - parser.add_argument("withMainUser", type=bool, required=False) - args = parser.parse_args() - with_main_user = False if args["withMainUser"] is None else args["withMainUser"] - - with ReadUserData() as data: - users = [] - if with_main_user: - users.append(data["username"]) - if "users" in data: - for user in data["users"]: - users.append(user["username"]) - return users - - def post(self): - """ - Create a new user - --- - consumes: - - application/json - tags: 
- - Users - security: - - bearerAuth: [] - parameters: - - in: body - name: user - required: true - description: User to create - schema: - type: object - required: - - username - - password - properties: - username: - type: string - description: Unix username. Must be alphanumeric and less than 32 characters - password: - type: string - description: Unix password. - responses: - 201: - description: Created user - 400: - description: Bad request - 401: - description: Unauthorized - 409: - description: User already exists - """ - parser = reqparse.RequestParser(bundle_errors=True) - parser.add_argument("username", type=str, required=True) - parser.add_argument("password", type=str, required=True) - args = parser.parse_args() - hashing_command = ["mkpasswd", "-m", "sha-512", args["password"]] - password_hash_process_descriptor = subprocess.Popen( - hashing_command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - hashed_password = password_hash_process_descriptor.communicate()[0] - hashed_password = hashed_password.decode("ascii") - hashed_password = hashed_password.rstrip() - # Check if username is forbidden - if is_username_forbidden(args["username"]): - return {"message": "Username is forbidden"}, 409 - # Check is username passes regex - if not re.match(r"^[a-z_][a-z0-9_]+$", args["username"]): - return {"error": "username must be alphanumeric"}, 400 - # Check if username less than 32 characters - if len(args["username"]) >= 32: - return {"error": "username must be less than 32 characters"}, 400 - - with WriteUserData() as data: - if "users" not in data: - data["users"] = [] - - # Return 409 if user already exists - if data["username"] == args["username"]: - return {"error": "User already exists"}, 409 - - for user in data["users"]: - if user["username"] == args["username"]: - return {"error": "User already exists"}, 409 - - data["users"].append( - { - "username": args["username"], - "hashedPassword": hashed_password, - } - ) - - return 
{"result": 0, "username": args["username"]}, 201 - - -class User(Resource): - """Single user managment""" - - def delete(self, username): - """ - Delete a user - --- - tags: - - Users - security: - - bearerAuth: [] - parameters: - - in: path - name: username - required: true - description: User to delete - type: string - responses: - 200: - description: Deleted user - 400: - description: Bad request - 401: - description: Unauthorized - 404: - description: User not found - """ - with WriteUserData() as data: - if username == data["username"]: - return {"error": "Cannot delete root user"}, 400 - # Return 400 if user does not exist - for user in data["users"]: - if user["username"] == username: - data["users"].remove(user) - break - else: - return {"error": "User does not exist"}, 404 - - return {"result": 0, "username": username} diff --git a/selfprivacy_api/rest/__init__.py b/selfprivacy_api/rest/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/rest/api_auth.py b/selfprivacy_api/rest/api_auth.py new file mode 100644 index 0000000..f73056c --- /dev/null +++ b/selfprivacy_api/rest/api_auth.py @@ -0,0 +1,127 @@ +from datetime import datetime +from typing import Optional +from fastapi import APIRouter, Depends, HTTPException +from pydantic import BaseModel +from selfprivacy_api.actions.api_tokens import ( + CannotDeleteCallerException, + InvalidExpirationDate, + InvalidUsesLeft, + NotFoundException, + delete_api_token, + get_api_recovery_token_status, + get_api_tokens_with_caller_flag, + get_new_api_recovery_key, + refresh_api_token, +) + +from selfprivacy_api.dependencies import TokenHeader, get_token_header + +from selfprivacy_api.utils.auth import ( + delete_new_device_auth_token, + get_new_device_auth_token, + use_mnemonic_recoverery_token, + use_new_device_auth_token, +) + +router = APIRouter( + prefix="/auth", + tags=["auth"], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/tokens") +async def 
rest_get_tokens(auth_token: TokenHeader = Depends(get_token_header)): + """Get the tokens info""" + return get_api_tokens_with_caller_flag(auth_token.token) + + +class DeleteTokenInput(BaseModel): + """Delete token input""" + + token_name: str + + +@router.delete("/tokens") +async def rest_delete_tokens( + token: DeleteTokenInput, auth_token: TokenHeader = Depends(get_token_header) +): + """Delete the tokens""" + try: + delete_api_token(auth_token.token, token.token_name) + except NotFoundException: + raise HTTPException(status_code=404, detail="Token not found") + except CannotDeleteCallerException: + raise HTTPException(status_code=400, detail="Cannot delete caller's token") + return {"message": "Token deleted"} + + +@router.post("/tokens") +async def rest_refresh_token(auth_token: TokenHeader = Depends(get_token_header)): + """Refresh the token""" + try: + new_token = refresh_api_token(auth_token.token) + except NotFoundException: + raise HTTPException(status_code=404, detail="Token not found") + return {"token": new_token} + + +@router.get("/recovery_token") +async def rest_get_recovery_token_status( + auth_token: TokenHeader = Depends(get_token_header), +): + return get_api_recovery_token_status() + + +class CreateRecoveryTokenInput(BaseModel): + expiration: Optional[datetime] = None + uses: Optional[int] = None + + +@router.post("/recovery_token") +async def rest_create_recovery_token( + limits: CreateRecoveryTokenInput = CreateRecoveryTokenInput(), + auth_token: TokenHeader = Depends(get_token_header), +): + try: + token = get_new_api_recovery_key(limits.expiration, limits.uses) + except InvalidExpirationDate as e: + raise HTTPException(status_code=400, detail=str(e)) + except InvalidUsesLeft as e: + raise HTTPException(status_code=400, detail=str(e)) + return {"token": token} + + +class UseTokenInput(BaseModel): + token: str + device: str + + +@router.post("/recovery_token/use") +async def rest_use_recovery_token(input: UseTokenInput): + token = 
use_mnemonic_recoverery_token(input.token, input.device) + if token is None: + raise HTTPException(status_code=404, detail="Token not found") + return {"token": token} + + +@router.post("/new_device") +async def rest_new_device(auth_token: TokenHeader = Depends(get_token_header)): + token = get_new_device_auth_token() + return {"token": token} + + +@router.delete("/new_device") +async def rest_delete_new_device_token( + auth_token: TokenHeader = Depends(get_token_header), +): + delete_new_device_auth_token() + return {"token": None} + + +@router.post("/new_device/authorize") +async def rest_new_device_authorize(input: UseTokenInput): + token = use_new_device_auth_token(input.token, input.device) + if token is None: + raise HTTPException(status_code=404, detail="Token not found") + return {"message": "Device authorized", "token": token} diff --git a/selfprivacy_api/rest/services.py b/selfprivacy_api/rest/services.py new file mode 100644 index 0000000..c9d5ff9 --- /dev/null +++ b/selfprivacy_api/rest/services.py @@ -0,0 +1,373 @@ +"""Basic services legacy api""" +import base64 +from typing import Optional +from fastapi import APIRouter, Depends, HTTPException +from pydantic import BaseModel +from selfprivacy_api.actions.ssh import ( + InvalidPublicKey, + KeyAlreadyExists, + KeyNotFound, + create_ssh_key, + enable_ssh, + get_ssh_settings, + remove_ssh_key, + set_ssh_settings, +) +from selfprivacy_api.actions.users import UserNotFound, get_user_by_username + +from selfprivacy_api.dependencies import get_token_header +from selfprivacy_api.restic_controller import ResticController, ResticStates +from selfprivacy_api.restic_controller import tasks as restic_tasks +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.mailserver import MailServer +from selfprivacy_api.services.nextcloud import Nextcloud +from selfprivacy_api.services.ocserv import Ocserv +from 
selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.services.service import ServiceStatus +from selfprivacy_api.utils import WriteUserData, get_dkim_key, get_domain + +router = APIRouter( + prefix="/services", + tags=["services"], + dependencies=[Depends(get_token_header)], + responses={404: {"description": "Not found"}}, +) + + +def service_status_to_return_code(status: ServiceStatus): + """Converts service status object to return code for + compatibility with legacy api""" + if status == ServiceStatus.ACTIVE: + return 0 + elif status == ServiceStatus.FAILED: + return 1 + elif status == ServiceStatus.INACTIVE: + return 3 + elif status == ServiceStatus.OFF: + return 4 + else: + return 2 + + +@router.get("/status") +async def get_status(): + """Get the status of the services""" + mail_status = MailServer.get_status() + bitwarden_status = Bitwarden.get_status() + gitea_status = Gitea.get_status() + nextcloud_status = Nextcloud.get_status() + ocserv_stauts = Ocserv.get_status() + pleroma_status = Pleroma.get_status() + + return { + "imap": service_status_to_return_code(mail_status), + "smtp": service_status_to_return_code(mail_status), + "http": 0, + "bitwarden": service_status_to_return_code(bitwarden_status), + "gitea": service_status_to_return_code(gitea_status), + "nextcloud": service_status_to_return_code(nextcloud_status), + "ocserv": service_status_to_return_code(ocserv_stauts), + "pleroma": service_status_to_return_code(pleroma_status), + } + + +@router.post("/bitwarden/enable") +async def enable_bitwarden(): + """Enable Bitwarden""" + Bitwarden.enable() + return { + "status": 0, + "message": "Bitwarden enabled", + } + + +@router.post("/bitwarden/disable") +async def disable_bitwarden(): + """Disable Bitwarden""" + Bitwarden.disable() + return { + "status": 0, + "message": "Bitwarden disabled", + } + + +@router.post("/gitea/enable") +async def enable_gitea(): + """Enable Gitea""" + Gitea.enable() + return { + "status": 0, + "message": "Gitea 
enabled", + } + + +@router.post("/gitea/disable") +async def disable_gitea(): + """Disable Gitea""" + Gitea.disable() + return { + "status": 0, + "message": "Gitea disabled", + } + + +@router.get("/mailserver/dkim") +async def get_mailserver_dkim(): + """Get the DKIM record for the mailserver""" + domain = get_domain() + + dkim = get_dkim_key(domain) + if dkim is None: + raise HTTPException(status_code=404, detail="DKIM record not found") + dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8") + return dkim + + +@router.post("/nextcloud/enable") +async def enable_nextcloud(): + """Enable Nextcloud""" + Nextcloud.enable() + return { + "status": 0, + "message": "Nextcloud enabled", + } + + +@router.post("/nextcloud/disable") +async def disable_nextcloud(): + """Disable Nextcloud""" + Nextcloud.disable() + return { + "status": 0, + "message": "Nextcloud disabled", + } + + +@router.post("/ocserv/enable") +async def enable_ocserv(): + """Enable Ocserv""" + Ocserv.enable() + return { + "status": 0, + "message": "Ocserv enabled", + } + + +@router.post("/ocserv/disable") +async def disable_ocserv(): + """Disable Ocserv""" + Ocserv.disable() + return { + "status": 0, + "message": "Ocserv disabled", + } + + +@router.post("/pleroma/enable") +async def enable_pleroma(): + """Enable Pleroma""" + Pleroma.enable() + return { + "status": 0, + "message": "Pleroma enabled", + } + + +@router.post("/pleroma/disable") +async def disable_pleroma(): + """Disable Pleroma""" + Pleroma.disable() + return { + "status": 0, + "message": "Pleroma disabled", + } + + +@router.get("/restic/backup/list") +async def get_restic_backup_list(): + restic = ResticController() + return restic.snapshot_list + + +@router.put("/restic/backup/create") +async def create_restic_backup(): + restic = ResticController() + if restic.state is ResticStates.NO_KEY: + raise HTTPException(status_code=400, detail="Backup key not provided") + if restic.state is ResticStates.INITIALIZING: + raise 
HTTPException(status_code=400, detail="Backup is initializing") + if restic.state is ResticStates.BACKING_UP: + raise HTTPException(status_code=409, detail="Backup is already running") + restic_tasks.start_backup() + return { + "status": 0, + "message": "Backup creation has started", + } + + +@router.get("/restic/backup/status") +async def get_restic_backup_status(): + restic = ResticController() + + return { + "status": restic.state.name, + "progress": restic.progress, + "error_message": restic.error_message, + } + + +@router.get("/restic/backup/reload") +async def reload_restic_backup(): + restic_tasks.load_snapshots() + return { + "status": 0, + "message": "Snapshots reload started", + } + + +class BackupRestoreInput(BaseModel): + backupId: str + + +@router.put("/restic/backup/restore") +async def restore_restic_backup(backup: BackupRestoreInput): + restic = ResticController() + if restic.state is ResticStates.NO_KEY: + raise HTTPException(status_code=400, detail="Backup key not provided") + if restic.state is ResticStates.NOT_INITIALIZED: + raise HTTPException( + status_code=400, detail="Backups repository is not initialized" + ) + if restic.state is ResticStates.BACKING_UP: + raise HTTPException(status_code=409, detail="Backup is already running") + if restic.state is ResticStates.INITIALIZING: + raise HTTPException(status_code=400, detail="Repository is initializing") + if restic.state is ResticStates.RESTORING: + raise HTTPException(status_code=409, detail="Restore is already running") + + for backup_item in restic.snapshot_list: + if backup_item["short_id"] == backup.backupId: + restic_tasks.restore_from_backup(backup.backupId) + return { + "status": 0, + "message": "Backup restoration procedure started", + } + + raise HTTPException(status_code=404, detail="Backup not found") + + +class BackblazeConfigInput(BaseModel): + accountId: str + accountKey: str + bucket: str + + +@router.put("/restic/backblaze/config") +async def 
set_backblaze_config(backblaze_config: BackblazeConfigInput): + with WriteUserData() as data: + if "backblaze" not in data: + data["backblaze"] = {} + data["backblaze"]["accountId"] = backblaze_config.accountId + data["backblaze"]["accountKey"] = backblaze_config.accountKey + data["backblaze"]["bucket"] = backblaze_config.bucket + + restic_tasks.update_keys_from_userdata() + + return "New Backblaze settings saved" + + +@router.post("/ssh/enable") +async def rest_enable_ssh(): + """Enable SSH""" + enable_ssh() + return { + "status": 0, + "message": "SSH enabled", + } + + +@router.get("/ssh") +async def rest_get_ssh(): + """Get the SSH configuration""" + settings = get_ssh_settings() + return { + "enable": settings.enable, + "passwordAuthentication": settings.passwordAuthentication, + } + + +class SshConfigInput(BaseModel): + enable: Optional[bool] = None + passwordAuthentication: Optional[bool] = None + + +@router.put("/ssh") +async def rest_set_ssh(ssh_config: SshConfigInput): + """Set the SSH configuration""" + set_ssh_settings(ssh_config.enable, ssh_config.passwordAuthentication) + + return "SSH settings changed" + + +class SshKeyInput(BaseModel): + public_key: str + + +@router.put("/ssh/key/send", status_code=201) +async def rest_send_ssh_key(input: SshKeyInput): + """Send the SSH key""" + try: + create_ssh_key("root", input.public_key) + except KeyAlreadyExists as error: + raise HTTPException(status_code=409, detail="Key already exists") from error + except InvalidPublicKey as error: + raise HTTPException( + status_code=400, + detail="Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported", + ) from error + + return { + "status": 0, + "message": "SSH key sent", + } + + +@router.get("/ssh/keys/{username}") +async def rest_get_ssh_keys(username: str): + """Get the SSH keys for a user""" + user = get_user_by_username(username) + if user is None: + raise HTTPException(status_code=404, detail="User not found") + + return user.ssh_keys + + +@router.post("/ssh/keys/{username}", status_code=201) +async def rest_add_ssh_key(username: str, input: SshKeyInput): + try: + create_ssh_key(username, input.public_key) + except KeyAlreadyExists as error: + raise HTTPException(status_code=409, detail="Key already exists") from error + except InvalidPublicKey as error: + raise HTTPException( + status_code=400, + detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", + ) from error + except UserNotFound as error: + raise HTTPException(status_code=404, detail="User not found") from error + + return { + "message": "New SSH key successfully written", + } + + +@router.delete("/ssh/keys/{username}") +async def rest_delete_ssh_key(username: str, input: SshKeyInput): + try: + remove_ssh_key(username, input.public_key) + except KeyNotFound as error: + raise HTTPException(status_code=404, detail="Key not found") from error + except UserNotFound as error: + raise HTTPException(status_code=404, detail="User not found") from error + return {"message": "SSH key deleted"} diff --git a/selfprivacy_api/rest/system.py b/selfprivacy_api/rest/system.py new file mode 100644 index 0000000..9933fb3 --- /dev/null +++ b/selfprivacy_api/rest/system.py @@ -0,0 +1,105 @@ +from typing import Optional +from fastapi import APIRouter, Body, Depends, HTTPException +from pydantic import BaseModel + +from selfprivacy_api.dependencies import get_token_header + +import selfprivacy_api.actions.system as system_actions + +router = APIRouter( + prefix="/system", + tags=["system"], + dependencies=[Depends(get_token_header)], + responses={404: 
{"description": "Not found"}}, +) + + +@router.get("/configuration/timezone") +async def get_timezone(): + """Get the timezone of the server""" + return system_actions.get_timezone() + + +class ChangeTimezoneRequestBody(BaseModel): + """Change the timezone of the server""" + + timezone: str + + +@router.put("/configuration/timezone") +async def change_timezone(timezone: ChangeTimezoneRequestBody): + """Change the timezone of the server""" + try: + system_actions.change_timezone(timezone.timezone) + except system_actions.InvalidTimezone as e: + raise HTTPException(status_code=400, detail=str(e)) + return {"timezone": timezone.timezone} + + +@router.get("/configuration/autoUpgrade") +async def get_auto_upgrade_settings(): + """Get the auto-upgrade settings""" + return system_actions.get_auto_upgrade_settings().dict() + + +class AutoUpgradeSettings(BaseModel): + """Settings for auto-upgrading user data""" + + enable: Optional[bool] = None + allowReboot: Optional[bool] = None + + +@router.put("/configuration/autoUpgrade") +async def set_auto_upgrade_settings(settings: AutoUpgradeSettings): + """Set the auto-upgrade settings""" + system_actions.set_auto_upgrade_settings(settings.enable, settings.allowReboot) + return "Auto-upgrade settings changed" + + +@router.get("/configuration/apply") +async def apply_configuration(): + """Apply the configuration""" + return_code = system_actions.rebuild_system() + return return_code + + +@router.get("/configuration/rollback") +async def rollback_configuration(): + """Rollback the configuration""" + return_code = system_actions.rollback_system() + return return_code + + +@router.get("/configuration/upgrade") +async def upgrade_configuration(): + """Upgrade the configuration""" + return_code = system_actions.upgrade_system() + return return_code + + +@router.get("/reboot") +async def reboot_system(): + """Reboot the system""" + system_actions.reboot_system() + return "System reboot has started" + + +@router.get("/version") +async def 
get_system_version(): + """Get the system version""" + return {"system_version": system_actions.get_system_version()} + + +@router.get("/pythonVersion") +async def get_python_version(): + """Get the Python version""" + return system_actions.get_python_version() + + +@router.get("/configuration/pull") +async def pull_configuration(): + """Pull the configuration""" + action_result = system_actions.pull_repository_changes() + if action_result.status == 0: + return action_result.dict() + raise HTTPException(status_code=500, detail=action_result.dict()) diff --git a/selfprivacy_api/rest/users.py b/selfprivacy_api/rest/users.py new file mode 100644 index 0000000..ab4c6c9 --- /dev/null +++ b/selfprivacy_api/rest/users.py @@ -0,0 +1,62 @@ +"""Users management module""" +from typing import Optional +from fastapi import APIRouter, Body, Depends, HTTPException +from pydantic import BaseModel + +import selfprivacy_api.actions.users as users_actions + +from selfprivacy_api.dependencies import get_token_header + +router = APIRouter( + prefix="/users", + tags=["users"], + dependencies=[Depends(get_token_header)], + responses={404: {"description": "Not found"}}, +) + + +@router.get("") +async def get_users(withMainUser: bool = False): + """Get the list of users""" + users: list[users_actions.UserDataUser] = users_actions.get_users( + exclude_primary=not withMainUser, exclude_root=True + ) + + return [user.username for user in users] + + +class UserInput(BaseModel): + """User input""" + + username: str + password: str + + +@router.post("", status_code=201) +async def create_user(user: UserInput): + try: + users_actions.create_user(user.username, user.password) + except users_actions.PasswordIsEmpty as e: + raise HTTPException(status_code=400, detail=str(e)) + except users_actions.UsernameForbidden as e: + raise HTTPException(status_code=409, detail=str(e)) + except users_actions.UsernameNotAlphanumeric as e: + raise HTTPException(status_code=400, detail=str(e)) + except 
users_actions.UsernameTooLong as e: + raise HTTPException(status_code=400, detail=str(e)) + except users_actions.UserAlreadyExists as e: + raise HTTPException(status_code=409, detail=str(e)) + + return {"result": 0, "username": user.username} + + +@router.delete("/{username}") +async def delete_user(username: str): + try: + users_actions.delete_user(username) + except users_actions.UserNotFound as e: + raise HTTPException(status_code=404, detail=str(e)) + except users_actions.UserIsProtected as e: + raise HTTPException(status_code=400, detail=str(e)) + + return {"result": 0, "username": username} diff --git a/selfprivacy_api/restic_controller/tasks.py b/selfprivacy_api/restic_controller/tasks.py index 4c610c4..f583d8b 100644 --- a/selfprivacy_api/restic_controller/tasks.py +++ b/selfprivacy_api/restic_controller/tasks.py @@ -1,10 +1,8 @@ """Tasks for the restic controller.""" from huey import crontab -from huey.contrib.mini import MiniHuey +from selfprivacy_api.utils.huey import huey from . 
import ResticController, ResticStates -huey = MiniHuey() - @huey.task() def init_restic(): diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py index e69de29..a688734 100644 --- a/selfprivacy_api/services/__init__.py +++ b/selfprivacy_api/services/__init__.py @@ -0,0 +1,67 @@ +"""Services module.""" + +import typing +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.jitsi import Jitsi +from selfprivacy_api.services.mailserver import MailServer +from selfprivacy_api.services.nextcloud import Nextcloud +from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.services.ocserv import Ocserv +from selfprivacy_api.services.service import Service, ServiceDnsRecord +import selfprivacy_api.utils.network as network_utils + +services: list[Service] = [ + Bitwarden(), + Gitea(), + MailServer(), + Nextcloud(), + Pleroma(), + Ocserv(), + Jitsi(), +] + + +def get_all_services() -> list[Service]: + return services + + +def get_service_by_id(service_id: str) -> typing.Optional[Service]: + for service in services: + if service.get_id() == service_id: + return service + return None + + +def get_enabled_services() -> list[Service]: + return [service for service in services if service.is_enabled()] + + +def get_disabled_services() -> list[Service]: + return [service for service in services if not service.is_enabled()] + + +def get_services_by_location(location: str) -> list[Service]: + return [service for service in services if service.get_location() == location] + + +def get_all_required_dns_records() -> list[ServiceDnsRecord]: + ip4 = network_utils.get_ip4() + ip6 = network_utils.get_ip6() + dns_records: list[ServiceDnsRecord] = [ + ServiceDnsRecord( + type="A", + name="api", + content=ip4, + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="api", + content=ip6, + ttl=3600, + ), + ] + for service in get_enabled_services(): + dns_records 
+= service.get_dns_records() + return dns_records diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py new file mode 100644 index 0000000..ea93de1 --- /dev/null +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -0,0 +1,174 @@ +"""Class representing Bitwarden service""" +import base64 +import subprocess +import typing + +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import get_service_status +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.huey import huey +import selfprivacy_api.utils.network as network_utils +from selfprivacy_api.services.bitwarden.icon import BITWARDEN_ICON + + +class Bitwarden(Service): + """Class representing Bitwarden service.""" + + @staticmethod + def get_id() -> str: + """Return service id.""" + return "bitwarden" + + @staticmethod + def get_display_name() -> str: + """Return service display name.""" + return "Bitwarden" + + @staticmethod + def get_description() -> str: + """Return service description.""" + return "Bitwarden is a password manager." 
+ + @staticmethod + def get_svg_icon() -> str: + """Read SVG icon from file and return it as base64 encoded string.""" + return base64.b64encode(BITWARDEN_ICON.encode("utf-8")).decode("utf-8") + + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://password.{domain}" + + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("bitwarden", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + """ + Return Bitwarden status from systemd. + Use command return code to determine status. + + Return code 0 means service is running. + Return code 1 or 2 means service is in error stat. + Return code 3 means service is stopped. + Return code 4 means service is off. + """ + return get_service_status("vaultwarden.service") + + @staticmethod + def enable(): + """Enable Bitwarden service.""" + with WriteUserData() as user_data: + if "bitwarden" not in user_data: + user_data["bitwarden"] = {} + user_data["bitwarden"]["enable"] = True + + @staticmethod + def disable(): + """Disable Bitwarden service.""" + with WriteUserData() as user_data: + if "bitwarden" not in user_data: + user_data["bitwarden"] = {} + user_data["bitwarden"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "vaultwarden.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "vaultwarden.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "vaultwarden.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + 
storage_usage += get_storage_usage("/var/lib/bitwarden") + storage_usage += get_storage_usage("/var/lib/bitwarden_rs") + return storage_usage + + @staticmethod + def get_location() -> str: + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("bitwarden", {}).get("location", "sda1") + else: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + """Return list of DNS records for Bitwarden service.""" + return [ + ServiceDnsRecord( + type="A", + name="password", + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="password", + content=network_utils.get_ip6(), + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + job = Jobs.get_instance().add( + type_id="services.bitwarden.move", + name="Move Bitwarden", + description=f"Moving Bitwarden data to {volume.name}", + ) + + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="bitwarden", + bind_location="/var/lib/bitwarden", + group="vaultwarden", + owner="vaultwarden", + ), + FolderMoveNames( + name="bitwarden_rs", + bind_location="/var/lib/bitwarden_rs", + group="vaultwarden", + owner="vaultwarden", + ), + ], + "bitwarden", + ) + + return job diff --git a/selfprivacy_api/services/bitwarden/bitwarden.svg b/selfprivacy_api/services/bitwarden/bitwarden.svg new file mode 100644 index 0000000..ced270c --- /dev/null +++ b/selfprivacy_api/services/bitwarden/bitwarden.svg @@ -0,0 +1,3 @@ + + + diff --git a/selfprivacy_api/services/bitwarden/icon.py b/selfprivacy_api/services/bitwarden/icon.py new file mode 100644 index 0000000..f9280e0 --- /dev/null +++ b/selfprivacy_api/services/bitwarden/icon.py @@ -0,0 +1,5 @@ +BITWARDEN_ICON = """ + + + +""" diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py new file mode 100644 index 0000000..c7d7c3b --- /dev/null +++ 
"""Generic handler for moving services between volumes."""

import subprocess
import time
import pathlib
import shutil

from pydantic import BaseModel
from selfprivacy_api.jobs import Job, JobStatus, Jobs
from selfprivacy_api.utils.huey import huey
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils import ReadUserData, WriteUserData
from selfprivacy_api.services.service import Service, ServiceStatus


class FolderMoveNames(BaseModel):
    """One folder to relocate: its directory name on the volume, the
    bind-mount target path, and the unix owner/group it must belong to."""

    name: str
    bind_location: str
    owner: str
    group: str


@huey.task()
def move_service(
    service: Service,
    volume: BlockDevice,
    job: Job,
    folder_names: list[FolderMoveNames],
    userdata_location: str,
):
    """Move a service's data folders to another volume.

    Runs as a huey background task; progress and errors are reported
    through ``job``.  Steps: pre-move sanity checks, stop the service,
    unmount old bind mounts, move data, restore ownership, bind-mount
    from the new volume, record the new location in userdata, restart.
    """
    job = Jobs.get_instance().update(
        job=job,
        status_text="Performing pre-move checks...",
        status=JobStatus.RUNNING,
    )
    service_name = service.get_display_name()
    with ReadUserData() as user_data:
        if not user_data.get("useBinds", False):
            Jobs.get_instance().update(
                job=job,
                status=JobStatus.ERROR,
                error="Server is not using binds.",
            )
            return
    # Check if we are on the same volume
    old_volume = service.get_location()
    if old_volume == volume.name:
        Jobs.get_instance().update(
            job=job,
            status=JobStatus.ERROR,
            error=f"{service_name} is already on this volume.",
        )
        return
    # Check if there is enough space on the new volume
    if int(volume.fsavail) < service.get_storage_usage():
        Jobs.get_instance().update(
            job=job,
            status=JobStatus.ERROR,
            error="Not enough space on the new volume.",
        )
        return
    # Make sure the volume is mounted
    if f"/volumes/{volume.name}" not in volume.mountpoints:
        Jobs.get_instance().update(
            job=job,
            status=JobStatus.ERROR,
            error="Volume is not mounted.",
        )
        return
    # Make sure every source directory exists, is a directory, and has
    # the expected owner before we touch anything.
    for folder in folder_names:
        source = pathlib.Path(f"/volumes/{old_volume}/{folder.name}")
        if not source.exists():
            Jobs.get_instance().update(
                job=job,
                status=JobStatus.ERROR,
                error=f"{service_name} is not found.",
            )
            return
        if not source.is_dir():
            Jobs.get_instance().update(
                job=job,
                status=JobStatus.ERROR,
                error=f"{service_name} is not a directory.",
            )
            return
        if source.owner() != folder.owner:
            Jobs.get_instance().update(
                job=job,
                status=JobStatus.ERROR,
                error=f"{service_name} owner is not {folder.owner}.",
            )
            return

    # Stop the service and wait for it to leave a transitional state,
    # checking every second; abort if it has not settled in 30 seconds.
    Jobs.get_instance().update(
        job=job,
        status=JobStatus.RUNNING,
        status_text=f"Stopping {service_name}...",
        progress=5,
    )
    service.stop()
    for _ in range(30):
        if service.get_status() not in (
            ServiceStatus.ACTIVATING,
            ServiceStatus.DEACTIVATING,
        ):
            break
        time.sleep(1)
    else:
        Jobs.get_instance().update(
            job=job,
            status=JobStatus.ERROR,
            error=f"{service_name} did not stop in 30 seconds.",
        )
        return

    # Unmount the old bind mounts
    Jobs.get_instance().update(
        job=job,
        status_text="Unmounting old folder...",
        status=JobStatus.RUNNING,
        progress=10,
    )
    for folder in folder_names:
        try:
            subprocess.run(
                ["umount", folder.bind_location],
                check=True,
            )
        except subprocess.CalledProcessError:
            Jobs.get_instance().update(
                job=job,
                status=JobStatus.ERROR,
                error="Unable to unmount old volume.",
            )
            return

    # Move data to the new volume, advancing progress from 20 towards 70.
    Jobs.get_instance().update(
        job=job,
        status_text="Moving data to new volume...",
        status=JobStatus.RUNNING,
        progress=20,
    )
    current_progress = 20
    folder_percentage = 50 // len(folder_names)
    for folder in folder_names:
        shutil.move(
            f"/volumes/{old_volume}/{folder.name}",
            f"/volumes/{volume.name}/{folder.name}",
        )
        # BUGFIX: current_progress was never incremented, so the reported
        # progress stayed frozen at the same value for every folder.
        current_progress += folder_percentage
        Jobs.get_instance().update(
            job=job,
            status_text="Moving data to new volume...",
            status=JobStatus.RUNNING,
            progress=current_progress,
        )

    # Restore ownership on the moved folders.
    Jobs.get_instance().update(
        job=job,
        status_text=f"Making sure {service_name} owns its files...",
        status=JobStatus.RUNNING,
        progress=70,
    )
    for folder in folder_names:
        try:
            subprocess.run(
                [
                    "chown",
                    "-R",
                    # BUGFIX: was f"{folder.owner}:f{folder.group}" -- the
                    # stray "f" produced a nonexistent group name.
                    f"{folder.owner}:{folder.group}",
                    f"/volumes/{volume.name}/{folder.name}",
                ],
                check=True,
            )
        except subprocess.CalledProcessError as error:
            print(error.output)
            # Non-fatal, as the message promises.  BUGFIX: this branch
            # previously also returned, contradicting "Continuing anyway"
            # and leaving the service stopped with its binds unmounted.
            Jobs.get_instance().update(
                job=job,
                status=JobStatus.RUNNING,
                error=f"Unable to set ownership of new volume. {service_name} may not be able to access its files. Continuing anyway.",
            )

    # Bind-mount the folders from the new volume.
    Jobs.get_instance().update(
        job=job,
        status_text=f"Mounting {service_name} data...",
        status=JobStatus.RUNNING,
        progress=90,
    )
    for folder in folder_names:
        try:
            subprocess.run(
                [
                    "mount",
                    "--bind",
                    f"/volumes/{volume.name}/{folder.name}",
                    folder.bind_location,
                ],
                check=True,
            )
        except subprocess.CalledProcessError as error:
            print(error.output)
            Jobs.get_instance().update(
                job=job,
                status=JobStatus.ERROR,
                error="Unable to mount new volume.",
            )
            return

    # Record the new location in userdata, then restart the service.
    Jobs.get_instance().update(
        job=job,
        status_text="Finishing move...",
        status=JobStatus.RUNNING,
        progress=95,
    )
    with WriteUserData() as user_data:
        if userdata_location not in user_data:
            user_data[userdata_location] = {}
        user_data[userdata_location]["location"] = volume.name
    service.start()
    Jobs.get_instance().update(
        job=job,
        status=JobStatus.FINISHED,
        result=f"{service_name} moved successfully.",
        # BUGFIX: was f"Starting {service}..." which interpolated the
        # service object's repr instead of its display name.
        status_text=f"Starting {service_name}...",
        progress=100,
    )
"""Generic size counter using pathlib."""
import pathlib


def get_storage_usage(path: str) -> int:
    """Return the total size in bytes of all files under *path*.

    Directory entries themselves contribute no size.  BUGFIX: the
    original pattern ``rglob("**/*")`` was redundantly double-recursive
    (``rglob`` already prepends ``**/``), and ``stat()`` on a dangling
    symlink raised OSError and aborted the whole scan; such entries are
    now skipped.  NOTE(review): ``stat()`` still follows symlinks to
    files, so a link's *target* size is counted -- confirm whether the
    "do not follow symlinks" intent requires ``lstat`` instead.
    """
    storage_usage = 0
    for entry in pathlib.Path(path).rglob("*"):
        if entry.is_dir():
            continue
        try:
            storage_usage += entry.stat().st_size
        except OSError:
            # Broken symlink or file removed mid-scan: ignore.
            continue
    return storage_usage


# --- selfprivacy_api/services/generic_status_getter.py ---
"""Generic service status fetcher using systemctl."""
import subprocess


def get_service_status(service: str) -> "ServiceStatus":
    """Return *service*'s status from systemd.

    Parses the ``LoadState``/``ActiveState`` lines of ``systemctl show``
    output; an unknown or not-found unit maps to ``OFF``.
    """
    # Imported lazily so the pure-stdlib helper above stays importable
    # (e.g. from tests) without pulling in the whole project package.
    from selfprivacy_api.services.service import ServiceStatus

    service_status = subprocess.check_output(["systemctl", "show", service])
    if b"LoadState=not-found" in service_status:
        return ServiceStatus.OFF
    # Substring checks are safe here: no ActiveState value is a prefix
    # of another in a way that would collide (e.g. "active" is not a
    # substring of "activating" or "inactive" once "ActiveState=" is
    # included in the needle).
    state_map = {
        b"ActiveState=active": ServiceStatus.ACTIVE,
        b"ActiveState=inactive": ServiceStatus.INACTIVE,
        b"ActiveState=activating": ServiceStatus.ACTIVATING,
        b"ActiveState=deactivating": ServiceStatus.DEACTIVATING,
        b"ActiveState=failed": ServiceStatus.FAILED,
        b"ActiveState=reloading": ServiceStatus.RELOADING,
    }
    for needle, status in state_map.items():
        if needle in service_status:
            return status
    return ServiceStatus.OFF


def get_service_status_from_several_units(services: list[str]) -> "ServiceStatus":
    """Fetch the status of every unit and return the worst one.

    Statuses from worst to best:
    OFF, FAILED, RELOADING, ACTIVATING, DEACTIVATING, INACTIVE, ACTIVE.
    """
    from selfprivacy_api.services.service import ServiceStatus

    observed = {get_service_status(service) for service in services}
    severity_order = [
        ServiceStatus.OFF,
        ServiceStatus.FAILED,
        ServiceStatus.RELOADING,
        ServiceStatus.ACTIVATING,
        ServiceStatus.DEACTIVATING,
        ServiceStatus.INACTIVE,
        ServiceStatus.ACTIVE,
    ]
    for status in severity_order:
        if status in observed:
            return status
    return ServiceStatus.OFF
get_display_name() -> str: + """Return service display name.""" + return "Gitea" + + @staticmethod + def get_description() -> str: + """Return service description.""" + return "Gitea is a Git forge." + + @staticmethod + def get_svg_icon() -> str: + """Read SVG icon from file and return it as base64 encoded string.""" + return base64.b64encode(GITEA_ICON.encode("utf-8")).decode("utf-8") + + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://git.{domain}" + + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("gitea", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + """ + Return Gitea status from systemd. + Use command return code to determine status. + Return code 0 means service is running. + Return code 1 or 2 means service is in error stat. + Return code 3 means service is stopped. + Return code 4 means service is off. 
+ """ + return get_service_status("gitea.service") + + @staticmethod + def enable(): + """Enable Gitea service.""" + with WriteUserData() as user_data: + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["enable"] = True + + @staticmethod + def disable(): + """Disable Gitea service.""" + with WriteUserData() as user_data: + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "gitea.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "gitea.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "gitea.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + storage_usage += get_storage_usage("/var/lib/gitea") + return storage_usage + + @staticmethod + def get_location() -> str: + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("gitea", {}).get("location", "sda1") + else: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + return [ + ServiceDnsRecord( + type="A", + name="git", + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="git", + content=network_utils.get_ip6(), + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + job = Jobs.get_instance().add( + type_id="services.gitea.move", + name="Move Gitea", + description=f"Moving Gitea data to {volume.name}", + ) + + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="gitea", + bind_location="/var/lib/gitea", + group="gitea", + owner="gitea", + ), + ], + "bitwarden", + ) + + return job diff --git 
"""Class representing Jitsi service"""
import base64
import subprocess
import typing

from selfprivacy_api.jobs import Job, Jobs
from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
from selfprivacy_api.services.generic_size_counter import get_storage_usage
from selfprivacy_api.services.generic_status_getter import (
    get_service_status,
    get_service_status_from_several_units,
)
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils.huey import huey
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.jitsi.icon import JITSI_ICON


class Jitsi(Service):
    """Class representing Jitsi service"""

    @staticmethod
    def get_id() -> str:
        """Return service id."""
        return "jitsi"

    @staticmethod
    def get_display_name() -> str:
        """Return service display name."""
        return "Jitsi"

    @staticmethod
    def get_description() -> str:
        """Return service description."""
        return "Jitsi is a free and open-source video conferencing solution."

    @staticmethod
    def get_svg_icon() -> str:
        """Return the bundled SVG icon as a base64 encoded string."""
        return base64.b64encode(JITSI_ICON.encode("utf-8")).decode("utf-8")

    @staticmethod
    def get_url() -> typing.Optional[str]:
        """Return the public URL Jitsi is served at."""
        domain = get_domain()
        return f"https://meet.{domain}"

    @staticmethod
    def is_movable() -> bool:
        return False

    @staticmethod
    def is_required() -> bool:
        return False

    @staticmethod
    def is_enabled() -> bool:
        with ReadUserData() as user_data:
            return user_data.get("jitsi", {}).get("enable", False)

    @staticmethod
    def get_status() -> ServiceStatus:
        """Return the worst status among Jitsi's systemd units."""
        return get_service_status_from_several_units(
            ["jitsi-videobridge.service", "jicofo.service"]
        )

    @staticmethod
    def enable():
        """Enable Jitsi service."""
        with WriteUserData() as user_data:
            if "jitsi" not in user_data:
                user_data["jitsi"] = {}
            user_data["jitsi"]["enable"] = True

    @staticmethod
    def disable():
        """Disable Jitsi service."""
        # BUGFIX(docs): the docstring previously said "Disable Gitea
        # service" (copy-paste from the Gitea module).
        with WriteUserData() as user_data:
            if "jitsi" not in user_data:
                user_data["jitsi"] = {}
            user_data["jitsi"]["enable"] = False

    @staticmethod
    def stop():
        subprocess.run(["systemctl", "stop", "jitsi-videobridge.service"])
        subprocess.run(["systemctl", "stop", "jicofo.service"])

    @staticmethod
    def start():
        subprocess.run(["systemctl", "start", "jitsi-videobridge.service"])
        subprocess.run(["systemctl", "start", "jicofo.service"])

    @staticmethod
    def restart():
        subprocess.run(["systemctl", "restart", "jitsi-videobridge.service"])
        subprocess.run(["systemctl", "restart", "jicofo.service"])

    @staticmethod
    def get_configuration():
        return {}

    @staticmethod
    def set_configuration(config_items):
        return super().set_configuration(config_items)

    @staticmethod
    def get_logs():
        return ""

    @staticmethod
    def get_storage_usage() -> int:
        """Return the size in bytes of Jitsi's data directory."""
        return get_storage_usage("/var/lib/jitsi-meet")

    @staticmethod
    def get_location() -> str:
        return "sda1"

    @staticmethod
    def get_dns_records() -> typing.List[ServiceDnsRecord]:
        """Return the A/AAAA records for the meet subdomain."""
        ip4 = network_utils.get_ip4()
        ip6 = network_utils.get_ip6()
        return [
            ServiceDnsRecord(
                type="A",
                name="meet",
                content=ip4,
                ttl=3600,
            ),
            ServiceDnsRecord(
                type="AAAA",
                name="meet",
                content=ip6,
                ttl=3600,
            ),
        ]

    def move_to_volume(self, volume: BlockDevice) -> Job:
        raise NotImplementedError("jitsi service is not movable")


# --- selfprivacy_api/services/mailserver/__init__.py (module header) ---
"""Class representing Dovecot and Postfix services"""

from selfprivacy_api.jobs import JobStatus  # noqa: F401  (used by mover)
import selfprivacy_api.utils as utils
from selfprivacy_api.services.mailserver.icon import MAILSERVER_ICON
class MailServer(Service):
    """Class representing the Dovecot + Postfix mail stack."""

    @staticmethod
    def get_id() -> str:
        return "mailserver"

    @staticmethod
    def get_display_name() -> str:
        return "Mail Server"

    @staticmethod
    def get_description() -> str:
        return "E-Mail for company and family."

    @staticmethod
    def get_svg_icon() -> str:
        """Return the bundled SVG icon as a base64 encoded string."""
        return base64.b64encode(MAILSERVER_ICON.encode("utf-8")).decode("utf-8")

    @staticmethod
    def get_url() -> typing.Optional[str]:
        """Return service url (mail has no web UI)."""
        return None

    @staticmethod
    def is_movable() -> bool:
        return True

    @staticmethod
    def is_required() -> bool:
        return True

    @staticmethod
    def is_enabled() -> bool:
        return True

    @staticmethod
    def get_status() -> ServiceStatus:
        """Return the worst status among the dovecot and postfix units."""
        return get_service_status_from_several_units(
            ["dovecot2.service", "postfix.service"]
        )

    @staticmethod
    def enable():
        raise NotImplementedError("enable is not implemented for MailServer")

    @staticmethod
    def disable():
        raise NotImplementedError("disable is not implemented for MailServer")

    @staticmethod
    def stop():
        subprocess.run(["systemctl", "stop", "dovecot2.service"])
        subprocess.run(["systemctl", "stop", "postfix.service"])

    @staticmethod
    def start():
        subprocess.run(["systemctl", "start", "dovecot2.service"])
        subprocess.run(["systemctl", "start", "postfix.service"])

    @staticmethod
    def restart():
        subprocess.run(["systemctl", "restart", "dovecot2.service"])
        subprocess.run(["systemctl", "restart", "postfix.service"])

    @staticmethod
    def get_configuration():
        return {}

    @staticmethod
    def set_configuration(config_items):
        return super().set_configuration(config_items)

    @staticmethod
    def get_logs():
        return ""

    @staticmethod
    def get_storage_usage() -> int:
        """Return the size in bytes of the mail store."""
        return get_storage_usage("/var/vmail")

    @staticmethod
    def get_location() -> str:
        """Return the name of the disk mail data lives on."""
        with utils.ReadUserData() as user_data:
            if user_data.get("useBinds", False):
                return user_data.get("mailserver", {}).get("location", "sda1")
            return "sda1"

    @staticmethod
    def get_dns_records() -> typing.List[ServiceDnsRecord]:
        """Return A/AAAA/MX/DMARC/SPF/DKIM records for the mail domain."""
        domain = utils.get_domain()
        dkim_record = utils.get_dkim_key(domain)
        ip4 = network_utils.get_ip4()
        ip6 = network_utils.get_ip6()

        # NOTE(review): when the DKIM key is missing, *all* mail DNS
        # records (A/AAAA/MX/SPF/DMARC) are dropped, not just the DKIM
        # one -- confirm this is intended.
        if dkim_record is None:
            return []

        return [
            ServiceDnsRecord(
                type="A",
                name=domain,
                content=ip4,
                ttl=3600,
            ),
            ServiceDnsRecord(
                type="AAAA",
                name=domain,
                content=ip6,
                ttl=3600,
            ),
            ServiceDnsRecord(
                type="MX", name=domain, content=domain, ttl=3600, priority=10
            ),
            # BUGFIX: was an f-string with no placeholders.
            ServiceDnsRecord(
                type="TXT", name="_dmarc", content="v=DMARC1; p=none", ttl=18000
            ),
            ServiceDnsRecord(
                type="TXT",
                name=domain,
                content=f"v=spf1 a mx ip4:{ip4} -all",
                ttl=18000,
            ),
            ServiceDnsRecord(
                type="TXT", name="selector._domainkey", content=dkim_record, ttl=18000
            ),
        ]

    def move_to_volume(self, volume: BlockDevice) -> Job:
        """Start a background job moving mail data to *volume*."""
        job = Jobs.get_instance().add(
            type_id="services.mailserver.move",
            name="Move Mail Server",
            description=f"Moving mailserver data to {volume.name}",
        )
        move_service(
            self,
            volume,
            job,
            [
                FolderMoveNames(
                    name="vmail",
                    bind_location="/var/vmail",
                    group="virtualMail",
                    owner="virtualMail",
                ),
                FolderMoveNames(
                    name="sieve",
                    bind_location="/var/sieve",
                    group="virtualMail",
                    owner="virtualMail",
                ),
            ],
            "mailserver",
        )
        return job
"""Class representing Nextcloud service."""
import base64
import subprocess
import typing
from selfprivacy_api.jobs import Job, Jobs
from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
from selfprivacy_api.services.generic_size_counter import get_storage_usage
from selfprivacy_api.services.generic_status_getter import get_service_status
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.nextcloud.icon import NEXTCLOUD_ICON


class Nextcloud(Service):
    """Class representing Nextcloud service."""

    @staticmethod
    def get_id() -> str:
        """Return service id."""
        return "nextcloud"

    @staticmethod
    def get_display_name() -> str:
        """Return service display name."""
        return "Nextcloud"

    @staticmethod
    def get_description() -> str:
        """Return service description."""
        return "Nextcloud is a cloud storage service that offers a web interface and a desktop client."

    @staticmethod
    def get_svg_icon() -> str:
        """Return the bundled SVG icon as a base64 encoded string."""
        return base64.b64encode(NEXTCLOUD_ICON.encode("utf-8")).decode("utf-8")

    @staticmethod
    def get_url() -> typing.Optional[str]:
        """Return service url."""
        domain = get_domain()
        return f"https://cloud.{domain}"

    @staticmethod
    def is_movable() -> bool:
        return True

    @staticmethod
    def is_required() -> bool:
        return False

    @staticmethod
    def is_enabled() -> bool:
        with ReadUserData() as user_data:
            return user_data.get("nextcloud", {}).get("enable", False)

    @staticmethod
    def get_status() -> ServiceStatus:
        """Return Nextcloud status as reported by systemd.

        BUGFIX(docs): the old docstring described `systemctl status`
        return codes, but the status is now parsed from
        ``systemctl show`` output by ``get_service_status``.
        """
        return get_service_status("phpfpm-nextcloud.service")

    @staticmethod
    def enable():
        """Enable Nextcloud service."""
        with WriteUserData() as user_data:
            if "nextcloud" not in user_data:
                user_data["nextcloud"] = {}
            user_data["nextcloud"]["enable"] = True

    @staticmethod
    def disable():
        """Disable Nextcloud service."""
        with WriteUserData() as user_data:
            if "nextcloud" not in user_data:
                user_data["nextcloud"] = {}
            user_data["nextcloud"]["enable"] = False

    @staticmethod
    def stop():
        """Stop Nextcloud service."""
        # NOTE(review): Popen does not wait for completion, unlike the
        # subprocess.run used by sibling services -- confirm intended.
        subprocess.Popen(["systemctl", "stop", "phpfpm-nextcloud.service"])

    @staticmethod
    def start():
        """Start Nextcloud service."""
        subprocess.Popen(["systemctl", "start", "phpfpm-nextcloud.service"])

    @staticmethod
    def restart():
        """Restart Nextcloud service."""
        subprocess.Popen(["systemctl", "restart", "phpfpm-nextcloud.service"])

    @staticmethod
    def get_configuration() -> dict:
        """Return Nextcloud configuration."""
        return {}

    @staticmethod
    def set_configuration(config_items):
        return super().set_configuration(config_items)

    @staticmethod
    def get_logs():
        """Return Nextcloud logs."""
        return ""

    @staticmethod
    def get_storage_usage() -> int:
        """
        Calculate the real storage usage of /var/lib/nextcloud and all
        subdirectories, without following symlinks.
        """
        return get_storage_usage("/var/lib/nextcloud")

    @staticmethod
    def get_location() -> str:
        """Get the name of disk where Nextcloud is installed."""
        with ReadUserData() as user_data:
            if user_data.get("useBinds", False):
                return user_data.get("nextcloud", {}).get("location", "sda1")
            return "sda1"

    @staticmethod
    def get_dns_records() -> typing.List[ServiceDnsRecord]:
        """Return the A/AAAA records for the cloud subdomain."""
        return [
            ServiceDnsRecord(
                type="A",
                name="cloud",
                content=network_utils.get_ip4(),
                ttl=3600,
            ),
            ServiceDnsRecord(
                type="AAAA",
                name="cloud",
                content=network_utils.get_ip6(),
                ttl=3600,
            ),
        ]

    def move_to_volume(self, volume: BlockDevice) -> Job:
        """Start a background job moving Nextcloud data to *volume*."""
        job = Jobs.get_instance().add(
            type_id="services.nextcloud.move",
            name="Move Nextcloud",
            description=f"Moving Nextcloud to volume {volume.name}",
        )
        move_service(
            self,
            volume,
            job,
            [
                FolderMoveNames(
                    name="nextcloud",
                    bind_location="/var/lib/nextcloud",
                    owner="nextcloud",
                    group="nextcloud",
                ),
            ],
            "nextcloud",
        )
        return job
class Ocserv(Service):
    """Class representing ocserv service."""

    @staticmethod
    def get_id() -> str:
        return "ocserv"

    @staticmethod
    def get_display_name() -> str:
        return "OpenConnect VPN"

    @staticmethod
    def get_description() -> str:
        return "OpenConnect VPN to connect your devices and access the internet."

    @staticmethod
    def get_svg_icon() -> str:
        """Return the bundled SVG icon as a base64 encoded string."""
        encoded = base64.b64encode(OCSERV_ICON.encode("utf-8"))
        return encoded.decode("utf-8")

    @staticmethod
    def get_url() -> typing.Optional[str]:
        """Return service url (ocserv has no web UI)."""
        return None

    @staticmethod
    def is_movable() -> bool:
        return False

    @staticmethod
    def is_required() -> bool:
        return False

    @staticmethod
    def is_enabled() -> bool:
        with ReadUserData() as user_data:
            ocserv_settings = user_data.get("ocserv", {})
            return ocserv_settings.get("enable", False)

    @staticmethod
    def get_status() -> ServiceStatus:
        """Return ocserv status as reported by systemd."""
        return get_service_status("ocserv.service")

    @staticmethod
    def enable():
        """Turn ocserv on in userdata."""
        with WriteUserData() as user_data:
            user_data.setdefault("ocserv", {})["enable"] = True

    @staticmethod
    def disable():
        """Turn ocserv off in userdata."""
        with WriteUserData() as user_data:
            user_data.setdefault("ocserv", {})["enable"] = False

    @staticmethod
    def stop():
        subprocess.run(["systemctl", "stop", "ocserv.service"])

    @staticmethod
    def start():
        subprocess.run(["systemctl", "start", "ocserv.service"])

    @staticmethod
    def restart():
        subprocess.run(["systemctl", "restart", "ocserv.service"])

    @staticmethod
    def get_configuration():
        return {}

    @staticmethod
    def set_configuration(config_items):
        return super().set_configuration(config_items)

    @staticmethod
    def get_logs():
        return ""

    @staticmethod
    def get_location() -> str:
        return "sda1"

    @staticmethod
    def get_dns_records() -> typing.List[ServiceDnsRecord]:
        """Return the A/AAAA records for the vpn subdomain."""
        ip4 = network_utils.get_ip4()
        ip6 = network_utils.get_ip6()
        records = []
        for record_type, content in (("A", ip4), ("AAAA", ip6)):
            records.append(
                ServiceDnsRecord(
                    type=record_type,
                    name="vpn",
                    content=content,
                    ttl=3600,
                )
            )
        return records

    @staticmethod
    def get_storage_usage() -> int:
        """ocserv stores no user data, so it occupies no space."""
        return 0

    def move_to_volume(self, volume: BlockDevice) -> Job:
        raise NotImplementedError("ocserv service is not movable")
from selfprivacy_api.services.pleroma.icon import PLEROMA_ICON


class Pleroma(Service):
    """Class representing Pleroma service."""

    @staticmethod
    def get_id() -> str:
        return "pleroma"

    @staticmethod
    def get_display_name() -> str:
        return "Pleroma"

    @staticmethod
    def get_description() -> str:
        return "Pleroma is a microblogging service that offers a web interface and a desktop client."

    @staticmethod
    def get_svg_icon() -> str:
        """Return the bundled SVG icon as a base64 encoded string."""
        return base64.b64encode(PLEROMA_ICON.encode("utf-8")).decode("utf-8")

    @staticmethod
    def get_url() -> typing.Optional[str]:
        """Return the public URL Pleroma is served at."""
        domain = get_domain()
        return f"https://social.{domain}"

    @staticmethod
    def is_movable() -> bool:
        return True

    @staticmethod
    def is_required() -> bool:
        return False

    @staticmethod
    def is_enabled() -> bool:
        with ReadUserData() as user_data:
            return user_data.get("pleroma", {}).get("enable", False)

    @staticmethod
    def get_status() -> ServiceStatus:
        """Return Pleroma status as reported by systemd."""
        return get_service_status("pleroma.service")

    @staticmethod
    def enable():
        """Enable Pleroma service."""
        with WriteUserData() as user_data:
            if "pleroma" not in user_data:
                user_data["pleroma"] = {}
            user_data["pleroma"]["enable"] = True

    @staticmethod
    def disable():
        """Disable Pleroma service."""
        with WriteUserData() as user_data:
            if "pleroma" not in user_data:
                user_data["pleroma"] = {}
            user_data["pleroma"]["enable"] = False

    @staticmethod
    def stop():
        subprocess.run(["systemctl", "stop", "pleroma.service"])
        subprocess.run(["systemctl", "stop", "postgresql.service"])

    @staticmethod
    def start():
        # NOTE(review): pleroma is started before its postgresql
        # database -- confirm the unit dependencies make this safe.
        subprocess.run(["systemctl", "start", "pleroma.service"])
        subprocess.run(["systemctl", "start", "postgresql.service"])

    @staticmethod
    def restart():
        subprocess.run(["systemctl", "restart", "pleroma.service"])
        subprocess.run(["systemctl", "restart", "postgresql.service"])

    @staticmethod
    def get_configuration():
        # BUGFIX: this previously took a `config_items` parameter,
        # breaking the Service ABC contract (get_configuration() takes
        # no arguments) and differing from every sibling service.
        return {}

    @staticmethod
    def set_configuration(config_items):
        return super().set_configuration(config_items)

    @staticmethod
    def get_logs():
        return ""

    @staticmethod
    def get_storage_usage() -> int:
        """Return combined size of the pleroma and postgresql data dirs."""
        storage_usage = 0
        storage_usage += get_storage_usage("/var/lib/pleroma")
        storage_usage += get_storage_usage("/var/lib/postgresql")
        return storage_usage

    @staticmethod
    def get_location() -> str:
        """Return the name of the disk Pleroma data lives on."""
        with ReadUserData() as user_data:
            if user_data.get("useBinds", False):
                return user_data.get("pleroma", {}).get("location", "sda1")
            return "sda1"

    @staticmethod
    def get_dns_records() -> typing.List[ServiceDnsRecord]:
        """Return the A/AAAA records for the social subdomain."""
        return [
            ServiceDnsRecord(
                type="A",
                name="social",
                content=network_utils.get_ip4(),
                ttl=3600,
            ),
            ServiceDnsRecord(
                type="AAAA",
                name="social",
                content=network_utils.get_ip6(),
                ttl=3600,
            ),
        ]

    def move_to_volume(self, volume: BlockDevice) -> Job:
        """Start a background job moving Pleroma and its DB to *volume*."""
        job = Jobs.get_instance().add(
            type_id="services.pleroma.move",
            name="Move Pleroma",
            description=f"Moving Pleroma to volume {volume.name}",
        )
        move_service(
            self,
            volume,
            job,
            [
                FolderMoveNames(
                    name="pleroma",
                    bind_location="/var/lib/pleroma",
                    owner="pleroma",
                    group="pleroma",
                ),
                FolderMoveNames(
                    name="postgresql",
                    bind_location="/var/lib/postgresql",
                    owner="postgres",
                    group="postgres",
                ),
            ],
            "pleroma",
        )
        return job
a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -3,23 +3,30 @@ from abc import ABC, abstractmethod from enum import Enum import typing +from pydantic import BaseModel +from selfprivacy_api.jobs import Job + +from selfprivacy_api.utils.block_devices import BlockDevice + class ServiceStatus(Enum): """Enum for service status""" - RUNNING = "RUNNING" - DEGRADED = "DEGRADED" - ERROR = "ERROR" - STOPPED = "STOPPED" + ACTIVE = "ACTIVE" + RELOADING = "RELOADING" + INACTIVE = "INACTIVE" + FAILED = "FAILED" + ACTIVATING = "ACTIVATING" + DEACTIVATING = "DEACTIVATING" OFF = "OFF" -class ServiceDnsRecord: +class ServiceDnsRecord(BaseModel): type: str name: str content: str ttl: int - priority: typing.Optional[int] + priority: typing.Optional[int] = None class Service(ABC): @@ -28,66 +35,106 @@ class Service(ABC): can be installed, configured and used by a user. """ + @staticmethod @abstractmethod - def get_id(self) -> str: + def get_id() -> str: + pass + + @staticmethod + @abstractmethod + def get_display_name() -> str: + pass + + @staticmethod + @abstractmethod + def get_description() -> str: + pass + + @staticmethod + @abstractmethod + def get_svg_icon() -> str: + pass + + @staticmethod + @abstractmethod + def get_url() -> typing.Optional[str]: + pass + + @staticmethod + @abstractmethod + def is_movable() -> bool: + pass + + @staticmethod + @abstractmethod + def is_required() -> bool: + pass + + @staticmethod + @abstractmethod + def is_enabled() -> bool: + pass + + @staticmethod + @abstractmethod + def get_status() -> ServiceStatus: + pass + + @staticmethod + @abstractmethod + def enable(): + pass + + @staticmethod + @abstractmethod + def disable(): + pass + + @staticmethod + @abstractmethod + def stop(): + pass + + @staticmethod + @abstractmethod + def start(): + pass + + @staticmethod + @abstractmethod + def restart(): + pass + + @staticmethod + @abstractmethod + def get_configuration(): + pass + + @staticmethod + @abstractmethod + def 
set_configuration(config_items): + pass + + @staticmethod + @abstractmethod + def get_logs(): + pass + + @staticmethod + @abstractmethod + def get_storage_usage() -> int: + pass + + @staticmethod + @abstractmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + pass + + @staticmethod + @abstractmethod + def get_location() -> str: pass @abstractmethod - def get_display_name(self) -> str: - pass - - @abstractmethod - def get_description(self) -> str: - pass - - @abstractmethod - def get_svg_icon(self) -> str: - pass - - @abstractmethod - def is_enabled(self) -> bool: - pass - - @abstractmethod - def get_status(self) -> ServiceStatus: - pass - - @abstractmethod - def enable(self): - pass - - @abstractmethod - def disable(self): - pass - - @abstractmethod - def stop(self): - pass - - @abstractmethod - def start(self): - pass - - @abstractmethod - def restart(self): - pass - - @abstractmethod - def get_configuration(self): - pass - - @abstractmethod - def set_configuration(self, config_items): - pass - - @abstractmethod - def get_logs(self): - pass - - @abstractmethod - def get_storage_usage(self): - pass - - @abstractmethod - def get_dns_records(self) -> typing.List[ServiceDnsRecord]: + def move_to_volume(self, volume: BlockDevice) -> Job: pass diff --git a/selfprivacy_api/task_registry.py b/selfprivacy_api/task_registry.py new file mode 100644 index 0000000..82eaf06 --- /dev/null +++ b/selfprivacy_api/task_registry.py @@ -0,0 +1,4 @@ +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.jobs.test import test_job +from selfprivacy_api.restic_controller.tasks import * +from selfprivacy_api.services.generic_service_mover import move_service diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 8ab26d1..83213d7 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -10,6 +10,7 @@ import portalocker USERDATA_FILE = "/etc/nixos/userdata/userdata.json" TOKENS_FILE = 
"/etc/nixos/userdata/tokens.json" +JOBS_FILE = "/etc/nixos/userdata/jobs.json" DOMAIN_FILE = "/var/domain" @@ -18,6 +19,7 @@ class UserDataFiles(Enum): USERDATA = 0 TOKENS = 1 + JOBS = 2 def get_domain(): @@ -35,6 +37,12 @@ class WriteUserData(object): self.userdata_file = open(USERDATA_FILE, "r+", encoding="utf-8") elif file_type == UserDataFiles.TOKENS: self.userdata_file = open(TOKENS_FILE, "r+", encoding="utf-8") + elif file_type == UserDataFiles.JOBS: + # Make sure file exists + if not os.path.exists(JOBS_FILE): + with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: + jobs_file.write("{}") + self.userdata_file = open(JOBS_FILE, "r+", encoding="utf-8") else: raise ValueError("Unknown file type") portalocker.lock(self.userdata_file, portalocker.LOCK_EX) @@ -60,6 +68,12 @@ class ReadUserData(object): self.userdata_file = open(USERDATA_FILE, "r", encoding="utf-8") elif file_type == UserDataFiles.TOKENS: self.userdata_file = open(TOKENS_FILE, "r", encoding="utf-8") + elif file_type == UserDataFiles.JOBS: + # Make sure file exists + if not os.path.exists(JOBS_FILE): + with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: + jobs_file.write("{}") + self.userdata_file = open(JOBS_FILE, "r", encoding="utf-8") else: raise ValueError("Unknown file type") portalocker.lock(self.userdata_file, portalocker.LOCK_SH) diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py index f512948..ecaf9af 100644 --- a/selfprivacy_api/utils/auth.py +++ b/selfprivacy_api/utils/auth.py @@ -5,6 +5,7 @@ from datetime import datetime, timedelta import re import typing +from pydantic import BaseModel from mnemonic import Mnemonic from . 
import ReadUserData, UserDataFiles, WriteUserData, parse_date @@ -87,7 +88,7 @@ def is_token_name_pair_valid(token_name, token): return False -def get_token_name(token): +def get_token_name(token: str) -> typing.Optional[str]: """Return the name of the token provided""" with ReadUserData(UserDataFiles.TOKENS) as tokens: for t in tokens["tokens"]: @@ -96,11 +97,22 @@ def get_token_name(token): return None +class BasicTokenInfo(BaseModel): + """Token info""" + + name: str + date: datetime + + def get_tokens_info(): """Get all tokens info without tokens themselves""" with ReadUserData(UserDataFiles.TOKENS) as tokens: return [ - {"name": token["name"], "date": token["date"]} for token in tokens["tokens"] + BasicTokenInfo( + name=t["name"], + date=parse_date(t["date"]), + ) + for t in tokens["tokens"] ] diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index e6adddc..9d96d52 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -16,13 +16,13 @@ def get_block_device(device_name): "-J", "-b", "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE, MODEL,SERIAL,TYPE", - device_name, + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + f"/dev/{device_name}", ] ) lsblk_output = lsblk_output.decode("utf-8") lsblk_output = json.loads(lsblk_output) - return lsblk_output["blockdevices"] + return lsblk_output["blockdevices"][0] def resize_block_device(block_device) -> bool: @@ -30,9 +30,11 @@ def resize_block_device(block_device) -> bool: Resize a block device. Return True if successful. 
""" resize_command = ["resize2fs", block_device] - resize_process = subprocess.Popen(resize_command, shell=False) - resize_process.communicate() - return resize_process.returncode == 0 + try: + subprocess.check_output(resize_command, shell=False) + except subprocess.CalledProcessError: + return False + return True class BlockDevice: @@ -43,14 +45,14 @@ class BlockDevice: def __init__(self, block_device): self.name = block_device["name"] self.path = block_device["path"] - self.fsavail = block_device["fsavail"] - self.fssize = block_device["fssize"] + self.fsavail = str(block_device["fsavail"]) + self.fssize = str(block_device["fssize"]) self.fstype = block_device["fstype"] - self.fsused = block_device["fsused"] - self.mountpoint = block_device["mountpoint"] + self.fsused = str(block_device["fsused"]) + self.mountpoints = block_device["mountpoints"] self.label = block_device["label"] self.uuid = block_device["uuid"] - self.size = block_device["size"] + self.size = str(block_device["size"]) self.model = block_device["model"] self.serial = block_device["serial"] self.type = block_device["type"] @@ -60,7 +62,7 @@ class BlockDevice: return self.name def __repr__(self): - return f"" + return f"" def __eq__(self, other): return self.name == other.name @@ -73,14 +75,14 @@ class BlockDevice: Update current data and return a dictionary of stats. 
""" device = get_block_device(self.name) - self.fsavail = device["fsavail"] - self.fssize = device["fssize"] + self.fsavail = str(device["fsavail"]) + self.fssize = str(device["fssize"]) self.fstype = device["fstype"] - self.fsused = device["fsused"] - self.mountpoint = device["mountpoint"] + self.fsused = str(device["fsused"]) + self.mountpoints = device["mountpoints"] self.label = device["label"] self.uuid = device["uuid"] - self.size = device["size"] + self.size = str(device["size"]) self.model = device["model"] self.serial = device["serial"] self.type = device["type"] @@ -92,7 +94,7 @@ class BlockDevice: "fssize": self.fssize, "fstype": self.fstype, "fsused": self.fsused, - "mountpoint": self.mountpoint, + "mountpoints": self.mountpoints, "label": self.label, "uuid": self.uuid, "size": self.size, @@ -170,7 +172,7 @@ class BlockDevices: "-J", "-b", "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", ] ) lsblk_output = lsblk_output.decode("utf-8") @@ -219,6 +221,6 @@ class BlockDevices: """ block_devices = [] for block_device in self.block_devices: - if block_device.mountpoint == mountpoint: + if mountpoint in block_device.mountpoints: block_devices.append(block_device) return block_devices diff --git a/selfprivacy_api/utils/huey.py b/selfprivacy_api/utils/huey.py new file mode 100644 index 0000000..034f7ba --- /dev/null +++ b/selfprivacy_api/utils/huey.py @@ -0,0 +1,14 @@ +"""MiniHuey singleton.""" +import os +from huey import SqliteHuey + +HUEY_DATABASE = "/etc/nixos/userdata/tasks.db" + +# Singleton instance containing the huey database. 
+ +test_mode = os.environ.get("TEST_MODE") + +huey = SqliteHuey( + HUEY_DATABASE, + immediate=test_mode == "true", +) diff --git a/selfprivacy_api/utils/network.py b/selfprivacy_api/utils/network.py index 5081f0e..c1b8a2b 100644 --- a/selfprivacy_api/utils/network.py +++ b/selfprivacy_api/utils/network.py @@ -2,9 +2,10 @@ """Network utils""" import subprocess import re +from typing import Optional -def get_ip4(): +def get_ip4() -> str: """Get IPv4 address""" try: ip4 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode( @@ -13,10 +14,10 @@ def get_ip4(): ip4 = re.search(r"inet (\d+\.\d+\.\d+\.\d+)\/\d+", ip4) except subprocess.CalledProcessError: ip4 = None - return ip4.group(1) if ip4 else None + return ip4.group(1) if ip4 else "" -def get_ip6(): +def get_ip6() -> str: """Get IPv6 address""" try: ip6 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode( @@ -25,4 +26,4 @@ def get_ip6(): ip6 = re.search(r"inet6 (\S+)\/\d+", ip6) except subprocess.CalledProcessError: ip6 = None - return ip6.group(1) if ip6 else None + return ip6.group(1) if ip6 else "" diff --git a/setup.py b/setup.py index 5619621..eabc165 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="1.2.7", + version="2.0.0", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", diff --git a/shell.nix b/shell.nix index e754a11..0ccb99d 100644 --- a/shell.nix +++ b/shell.nix @@ -1,12 +1,8 @@ { pkgs ? 
import { } }: let sp-python = pkgs.python39.withPackages (p: with p; [ - flask - flask-restful setuptools portalocker - flask-swagger - flask-swagger-ui pytz pytest pytest-mock @@ -18,9 +14,10 @@ let pylint pydantic typing-extensions - flask-cors psutil black + fastapi + uvicorn (buildPythonPackage rec { pname = "strawberry-graphql"; version = "0.123.0"; @@ -32,11 +29,11 @@ let typing-extensions python-multipart python-dateutil - flask + # flask pydantic pygments poetry - flask-cors + # flask-cors (buildPythonPackage rec { pname = "graphql-core"; version = "3.2.0"; diff --git a/tests/conftest.py b/tests/conftest.py index fb31456..ea7a66a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,9 +1,13 @@ """Tests configuration.""" # pylint: disable=redefined-outer-name # pylint: disable=unused-argument +import os import pytest -from flask import testing -from selfprivacy_api.app import create_app +from fastapi.testclient import TestClient + + +def pytest_generate_tests(metafunc): + os.environ["TEST_MODE"] = "true" @pytest.fixture @@ -16,66 +20,43 @@ def tokens_file(mocker, shared_datadir): @pytest.fixture -def app(): - """Flask application.""" - app = create_app( - { - "ENABLE_SWAGGER": "1", - } +def jobs_file(mocker, shared_datadir): + """Mock tokens file.""" + mock = mocker.patch("selfprivacy_api.utils.JOBS_FILE", shared_datadir / "jobs.json") + return mock + + +@pytest.fixture +def huey_database(mocker, shared_datadir): + """Mock huey database.""" + mock = mocker.patch( + "selfprivacy_api.utils.huey.HUEY_DATABASE", shared_datadir / "huey.db" ) - - yield app + return mock @pytest.fixture -def client(app, tokens_file): - """Flask unauthorized test client.""" - return app.test_client() +def client(tokens_file, huey_database, jobs_file): + from selfprivacy_api.app import app - -class AuthorizedClient(testing.FlaskClient): - """Flask authorized test client.""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.token = "TEST_TOKEN" 
- - def open(self, *args, **kwargs): - if "headers" not in kwargs: - kwargs["headers"] = {} - kwargs["headers"]["Authorization"] = f"Bearer {self.token}" - return super().open(*args, **kwargs) - - -class WrongAuthClient(testing.FlaskClient): - """Flask client with wrong token""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.token = "WRONG_TOKEN" - - def open(self, *args, **kwargs): - if "headers" not in kwargs: - kwargs["headers"] = {} - kwargs["headers"]["Authorization"] = f"Bearer {self.token}" - return super().open(*args, **kwargs) + return TestClient(app) @pytest.fixture -def authorized_client(app, tokens_file): +def authorized_client(tokens_file, huey_database, jobs_file): """Authorized test client fixture.""" - app.test_client_class = AuthorizedClient - return app.test_client() + from selfprivacy_api.app import app + + client = TestClient(app) + client.headers.update({"Authorization": "Bearer TEST_TOKEN"}) + return client @pytest.fixture -def wrong_auth_client(app, tokens_file): +def wrong_auth_client(tokens_file, huey_database, jobs_file): """Wrong token test client fixture.""" - app.test_client_class = WrongAuthClient - return app.test_client() + from selfprivacy_api.app import app - -@pytest.fixture -def runner(app, tokens_file): - """Flask test runner.""" - return app.test_cli_runner() + client = TestClient(app) + client.headers.update({"Authorization": "Bearer WRONG_TOKEN"}) + return client diff --git a/tests/data/jobs.json b/tests/data/jobs.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tests/data/jobs.json @@ -0,0 +1 @@ +{} diff --git a/tests/test_block_device_utils.py b/tests/test_block_device_utils.py new file mode 100644 index 0000000..2676e6c --- /dev/null +++ b/tests/test_block_device_utils.py @@ -0,0 +1,490 @@ +#!/usr/bin/env python3 +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +import json +import subprocess 
+import pytest + +from selfprivacy_api.utils.block_devices import ( + BlockDevice, + BlockDevices, + get_block_device, + resize_block_device, +) +from tests.common import read_json + +SINGLE_LSBLK_OUTPUT = b""" +{ + "blockdevices": [ + { + "name": "sda1", + "path": "/dev/sda1", + "fsavail": "4614107136", + "fssize": "19814920192", + "fstype": "ext4", + "fsused": "14345314304", + "mountpoints": [ + "/nix/store", "/" + ], + "label": null, + "uuid": "ec80c004-baec-4a2c-851d-0e1807135511", + "size": 20210236928, + "model": null, + "serial": null, + "type": "part" + } + ] +} +""" + + +@pytest.fixture +def lsblk_singular_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=SINGLE_LSBLK_OUTPUT + ) + return mock + + +@pytest.fixture +def failed_check_output_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", + autospec=True, + side_effect=subprocess.CalledProcessError( + returncode=1, cmd=["some", "command"] + ), + ) + return mock + + +@pytest.fixture +def only_root_in_userdata(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "only_root.json") + assert read_json(datadir / "only_root.json")["volumes"][0]["device"] == "/dev/sda1" + assert ( + read_json(datadir / "only_root.json")["volumes"][0]["mountPoint"] + == "/volumes/sda1" + ) + assert read_json(datadir / "only_root.json")["volumes"][0]["filesystem"] == "ext4" + return datadir + + +@pytest.fixture +def no_devices_in_userdata(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_devices.json") + assert read_json(datadir / "no_devices.json")["volumes"] == [] + return datadir + + +@pytest.fixture +def undefined_devices_in_userdata(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") + assert "volumes" not in read_json(datadir / "undefined.json") + return datadir + + +def test_create_block_device_object(lsblk_singular_mock, authorized_client): + 
output = get_block_device("sda1") + assert lsblk_singular_mock.call_count == 1 + assert lsblk_singular_mock.call_args[0][0] == [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "/dev/sda1", + ] + assert output == json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0] + + +def test_resize_block_device(lsblk_singular_mock, authorized_client): + result = resize_block_device("sdb") + assert result is True + assert lsblk_singular_mock.call_count == 1 + assert lsblk_singular_mock.call_args[0][0] == [ + "resize2fs", + "sdb", + ] + + +def test_resize_block_device_failed(failed_check_output_mock, authorized_client): + result = resize_block_device("sdb") + assert result is False + assert failed_check_output_mock.call_count == 1 + assert failed_check_output_mock.call_args[0][0] == [ + "resize2fs", + "sdb", + ] + + +VOLUME_LSBLK_OUTPUT = b""" +{ + "blockdevices": [ + { + "name": "sdb", + "path": "/dev/sdb", + "fsavail": "11888545792", + "fssize": "12573614080", + "fstype": "ext4", + "fsused": "24047616", + "mountpoints": [ + "/volumes/sdb" + ], + "label": null, + "uuid": "fa9d0026-ee23-4047-b8b1-297ae16fa751", + "size": 12884901888, + "model": "Volume", + "serial": "21378102", + "type": "disk" + } + ] +} +""" + + +def test_create_block_device(lsblk_singular_mock, authorized_client): + block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + + assert block_device.name == "sdb" + assert block_device.path == "/dev/sdb" + assert block_device.fsavail == "11888545792" + assert block_device.fssize == "12573614080" + assert block_device.fstype == "ext4" + assert block_device.fsused == "24047616" + assert block_device.mountpoints == ["/volumes/sdb"] + assert block_device.label is None + assert block_device.uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751" + assert block_device.size == "12884901888" + assert block_device.model == "Volume" + assert block_device.serial == "21378102" + assert 
block_device.type == "disk" + assert block_device.locked is False + assert str(block_device) == "sdb" + assert ( + repr(block_device) + == "" + ) + assert hash(block_device) == hash("sdb") + + +def test_block_devices_equal(lsblk_singular_mock, authorized_client): + block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + block_device2 = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + + assert block_device == block_device2 + + +@pytest.fixture +def resize_block_mock(mocker): + mock = mocker.patch( + "selfprivacy_api.utils.block_devices.resize_block_device", + autospec=True, + return_value=True, + ) + return mock + + +def test_call_resize_from_block_device( + lsblk_singular_mock, resize_block_mock, authorized_client +): + block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + block_device.resize() + assert resize_block_mock.call_count == 1 + assert resize_block_mock.call_args[0][0] == "/dev/sdb" + assert lsblk_singular_mock.call_count == 0 + + +def test_get_stats_from_block_device(lsblk_singular_mock, authorized_client): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + stats = block_device.stats() + assert stats == { + "name": "sda1", + "path": "/dev/sda1", + "fsavail": "4614107136", + "fssize": "19814920192", + "fstype": "ext4", + "fsused": "14345314304", + "mountpoints": ["/nix/store", "/"], + "label": None, + "uuid": "ec80c004-baec-4a2c-851d-0e1807135511", + "size": "20210236928", + "model": None, + "serial": None, + "type": "part", + } + assert lsblk_singular_mock.call_count == 1 + assert lsblk_singular_mock.call_args[0][0] == [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "/dev/sda1", + ] + + +def test_mount_block_device( + lsblk_singular_mock, only_root_in_userdata, authorized_client +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = 
block_device.mount() + assert result is False + volume = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + result = volume.mount() + assert result is True + assert ( + read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["device"] + == "/dev/sdb" + ) + assert ( + read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["mountPoint"] + == "/volumes/sdb" + ) + assert ( + read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["fsType"] + == "ext4" + ) + + +def test_mount_block_device_when_undefined( + lsblk_singular_mock, undefined_devices_in_userdata, authorized_client +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.mount() + assert result is True + assert ( + read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][ + "device" + ] + == "/dev/sda1" + ) + assert ( + read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][ + "mountPoint" + ] + == "/volumes/sda1" + ) + assert ( + read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][ + "fsType" + ] + == "ext4" + ) + + +def test_unmount_block_device( + lsblk_singular_mock, only_root_in_userdata, authorized_client +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.unmount() + assert result is True + volume = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + result = volume.unmount() + assert result is False + assert len(read_json(only_root_in_userdata / "only_root.json")["volumes"]) == 0 + + +def test_unmount_block_device_when_undefined( + lsblk_singular_mock, undefined_devices_in_userdata, authorized_client +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.unmount() + assert result is False + assert ( + len(read_json(undefined_devices_in_userdata / "undefined.json")["volumes"]) == 0 + ) + + +FULL_LSBLK_OUTPUT = b""" +{ + 
"blockdevices": [ + { + "name": "sda", + "path": "/dev/sda", + "fsavail": null, + "fssize": null, + "fstype": null, + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": null, + "size": 20480786432, + "model": "QEMU HARDDISK", + "serial": "drive-scsi0-0-0-0", + "type": "disk", + "children": [ + { + "name": "sda1", + "path": "/dev/sda1", + "fsavail": "4605702144", + "fssize": "19814920192", + "fstype": "ext4", + "fsused": "14353719296", + "mountpoints": [ + "/nix/store", "/" + ], + "label": null, + "uuid": "ec80c004-baec-4a2c-851d-0e1807135511", + "size": 20210236928, + "model": null, + "serial": null, + "type": "part" + },{ + "name": "sda14", + "path": "/dev/sda14", + "fsavail": null, + "fssize": null, + "fstype": null, + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": null, + "size": 1048576, + "model": null, + "serial": null, + "type": "part" + },{ + "name": "sda15", + "path": "/dev/sda15", + "fsavail": null, + "fssize": null, + "fstype": "vfat", + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": "6B29-5BA7", + "size": 268435456, + "model": null, + "serial": null, + "type": "part" + } + ] + },{ + "name": "sdb", + "path": "/dev/sdb", + "fsavail": "11888545792", + "fssize": "12573614080", + "fstype": "ext4", + "fsused": "24047616", + "mountpoints": [ + "/volumes/sdb" + ], + "label": null, + "uuid": "fa9d0026-ee23-4047-b8b1-297ae16fa751", + "size": 12884901888, + "model": "Volume", + "serial": "21378102", + "type": "disk" + },{ + "name": "sr0", + "path": "/dev/sr0", + "fsavail": null, + "fssize": null, + "fstype": null, + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": null, + "size": 1073741312, + "model": "QEMU DVD-ROM", + "serial": "QM00003", + "type": "rom" + } + ] +} +""" + + +@pytest.fixture +def lsblk_full_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=FULL_LSBLK_OUTPUT + ) + return mock + + +def 
test_get_block_devices(lsblk_full_mock, authorized_client): + block_devices = BlockDevices().get_block_devices() + assert len(block_devices) == 2 + assert block_devices[0].name == "sda1" + assert block_devices[0].path == "/dev/sda1" + assert block_devices[0].fsavail == "4605702144" + assert block_devices[0].fssize == "19814920192" + assert block_devices[0].fstype == "ext4" + assert block_devices[0].fsused == "14353719296" + assert block_devices[0].mountpoints == ["/nix/store", "/"] + assert block_devices[0].label is None + assert block_devices[0].uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert block_devices[0].size == "20210236928" + assert block_devices[0].model is None + assert block_devices[0].serial is None + assert block_devices[0].type == "part" + assert block_devices[1].name == "sdb" + assert block_devices[1].path == "/dev/sdb" + assert block_devices[1].fsavail == "11888545792" + assert block_devices[1].fssize == "12573614080" + assert block_devices[1].fstype == "ext4" + assert block_devices[1].fsused == "24047616" + assert block_devices[1].mountpoints == ["/volumes/sdb"] + assert block_devices[1].label is None + assert block_devices[1].uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751" + assert block_devices[1].size == "12884901888" + assert block_devices[1].model == "Volume" + assert block_devices[1].serial == "21378102" + assert block_devices[1].type == "disk" + + +def test_get_block_device(lsblk_full_mock, authorized_client): + block_device = BlockDevices().get_block_device("sda1") + assert block_device is not None + assert block_device.name == "sda1" + assert block_device.path == "/dev/sda1" + assert block_device.fsavail == "4605702144" + assert block_device.fssize == "19814920192" + assert block_device.fstype == "ext4" + assert block_device.fsused == "14353719296" + assert block_device.mountpoints == ["/nix/store", "/"] + assert block_device.label is None + assert block_device.uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert 
block_device.size == "20210236928" + assert block_device.model is None + assert block_device.serial is None + assert block_device.type == "part" + + +def test_get_nonexistent_block_device(lsblk_full_mock, authorized_client): + block_device = BlockDevices().get_block_device("sda2") + assert block_device is None + + +def test_get_block_devices_by_mountpoint(lsblk_full_mock, authorized_client): + block_devices = BlockDevices().get_block_devices_by_mountpoint("/nix/store") + assert len(block_devices) == 1 + assert block_devices[0].name == "sda1" + assert block_devices[0].path == "/dev/sda1" + assert block_devices[0].fsavail == "4605702144" + assert block_devices[0].fssize == "19814920192" + assert block_devices[0].fstype == "ext4" + assert block_devices[0].fsused == "14353719296" + assert block_devices[0].mountpoints == ["/nix/store", "/"] + assert block_devices[0].label is None + assert block_devices[0].uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert block_devices[0].size == "20210236928" + assert block_devices[0].model is None + assert block_devices[0].serial is None + assert block_devices[0].type == "part" + + +def test_get_block_devices_by_mountpoint_no_match(lsblk_full_mock, authorized_client): + block_devices = BlockDevices().get_block_devices_by_mountpoint("/foo") + assert len(block_devices) == 0 diff --git a/tests/test_block_device_utils/no_devices.json b/tests/test_block_device_utils/no_devices.json new file mode 100644 index 0000000..97300ca --- /dev/null +++ b/tests/test_block_device_utils/no_devices.json @@ -0,0 +1,54 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + 
"databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "volumes": [ + ] +} diff --git a/tests/test_block_device_utils/only_root.json b/tests/test_block_device_utils/only_root.json new file mode 100644 index 0000000..0f8ec0d --- /dev/null +++ b/tests/test_block_device_utils/only_root.json @@ -0,0 +1,59 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "volumes": [ + { + "device": "/dev/sda1", + "mountPoint": "/volumes/sda1", + "filesystem": "ext4" + } + ] +} diff --git a/tests/test_block_device_utils/undefined.json b/tests/test_block_device_utils/undefined.json new file mode 100644 index 0000000..eb660cc --- /dev/null +++ b/tests/test_block_device_utils/undefined.json @@ -0,0 +1,52 @@ +{ + "backblaze": { + "accountId": "ID", + 
"accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} diff --git a/tests/test_common.py b/tests/test_common.py index db60d84..e5d3f62 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,6 +1,7 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument import json +import os import pytest from selfprivacy_api.utils import WriteUserData, ReadUserData @@ -9,19 +10,13 @@ from selfprivacy_api.utils import WriteUserData, ReadUserData def test_get_api_version(authorized_client): response = authorized_client.get("/api/version") assert response.status_code == 200 - assert "version" in response.get_json() + assert "version" in response.json() def test_get_api_version_unauthorized(client): response = client.get("/api/version") assert response.status_code == 200 - assert "version" in response.get_json() - - -def test_get_swagger_json(authorized_client): - response = authorized_client.get("/api/swagger.json") - assert response.status_code == 200 - assert "swagger" in response.get_json() + assert "version" in response.json() def test_read_invalid_user_data(): @@ -34,3 +29,12 @@ def test_write_invalid_user_data(): with pytest.raises(ValueError): with 
WriteUserData("invalid") as user_data: pass + + +@pytest.fixture +def test_mode(): + return os.environ.get("TEST_MODE") + + +def test_the_test_mode(test_mode): + assert test_mode == "true" diff --git a/tests/test_graphql/test_api.py b/tests/test_graphql/test_api.py index 6343d8f..16c7c4d 100644 --- a/tests/test_graphql/test_api.py +++ b/tests/test_graphql/test_api.py @@ -24,7 +24,7 @@ TOKENS_FILE_CONTETS = { def test_graphql_get_entire_api_data(authorized_client, tokens_file): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_api_query( @@ -33,25 +33,25 @@ def test_graphql_get_entire_api_data(authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert "version" in response.get_json()["data"]["api"] - assert response.json["data"]["api"]["devices"] is not None - assert len(response.json["data"]["api"]["devices"]) == 2 + assert response.json().get("data") is not None + assert "version" in response.json()["data"]["api"] + assert response.json()["data"]["api"]["devices"] is not None + assert len(response.json()["data"]["api"]["devices"]) == 2 assert ( - response.json["data"]["api"]["devices"][0]["creationDate"] + response.json()["data"]["api"]["devices"][0]["creationDate"] == "2022-01-14T08:31:10.789314" ) - assert response.json["data"]["api"]["devices"][0]["isCaller"] is True - assert response.json["data"]["api"]["devices"][0]["name"] == "test_token" + assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True + assert response.json()["data"]["api"]["devices"][0]["name"] == "test_token" assert ( - response.json["data"]["api"]["devices"][1]["creationDate"] + response.json()["data"]["api"]["devices"][1]["creationDate"] == "2022-01-14T08:31:10.789314" ) - assert response.json["data"]["api"]["devices"][1]["isCaller"] is False - assert response.json["data"]["api"]["devices"][1]["name"] == "test_token2" - assert 
response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is False - assert response.json["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False + assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2" + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 627d06a..d8dc974 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -31,35 +31,35 @@ devices { def test_graphql_tokens_info(authorized_client, tokens_file): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_DEVICES_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["devices"] is not None - assert len(response.json["data"]["api"]["devices"]) == 2 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["devices"] is not None + assert len(response.json()["data"]["api"]["devices"]) == 2 assert ( - response.json["data"]["api"]["devices"][0]["creationDate"] + response.json()["data"]["api"]["devices"][0]["creationDate"] == 
"2022-01-14T08:31:10.789314" ) - assert response.json["data"]["api"]["devices"][0]["isCaller"] is True - assert response.json["data"]["api"]["devices"][0]["name"] == "test_token" + assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True + assert response.json()["data"]["api"]["devices"][0]["name"] == "test_token" assert ( - response.json["data"]["api"]["devices"][1]["creationDate"] + response.json()["data"]["api"]["devices"][1]["creationDate"] == "2022-01-14T08:31:10.789314" ) - assert response.json["data"]["api"]["devices"][1]["isCaller"] is False - assert response.json["data"]["api"]["devices"][1]["name"] == "test_token2" + assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False + assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2" def test_graphql_tokens_info_unauthorized(client, tokens_file): - response = client.get( + response = client.post( "/graphql", json={"query": generate_api_query([API_DEVICES_QUERY])}, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None DELETE_TOKEN_MUTATION = """ @@ -84,7 +84,7 @@ def test_graphql_delete_token_unauthorized(client, tokens_file): }, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None def test_graphql_delete_token(authorized_client, tokens_file): @@ -98,10 +98,10 @@ def test_graphql_delete_token(authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["deleteDeviceApiToken"]["success"] is True - assert response.json["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json["data"]["deleteDeviceApiToken"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["deleteDeviceApiToken"]["success"] is True + assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None + 
assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 200 assert read_json(tokens_file) == { "tokens": [ { @@ -124,10 +124,10 @@ def test_graphql_delete_self_token(authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["deleteDeviceApiToken"]["success"] is False - assert response.json["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json["data"]["deleteDeviceApiToken"]["code"] == 400 + assert response.json().get("data") is not None + assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False + assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 400 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -142,10 +142,10 @@ def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["deleteDeviceApiToken"]["success"] is False - assert response.json["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json["data"]["deleteDeviceApiToken"]["code"] == 404 + assert response.json().get("data") is not None + assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False + assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 404 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -167,7 +167,7 @@ def test_graphql_refresh_token_unauthorized(client, tokens_file): json={"query": REFRESH_TOKEN_MUTATION}, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None def test_graphql_refresh_token(authorized_client, tokens_file): @@ -176,12 +176,12 @@ def test_graphql_refresh_token(authorized_client, tokens_file): json={"query": 
REFRESH_TOKEN_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["refreshDeviceApiToken"]["success"] is True - assert response.json["data"]["refreshDeviceApiToken"]["message"] is not None - assert response.json["data"]["refreshDeviceApiToken"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["refreshDeviceApiToken"]["success"] is True + assert response.json()["data"]["refreshDeviceApiToken"]["message"] is not None + assert response.json()["data"]["refreshDeviceApiToken"]["code"] == 200 assert read_json(tokens_file)["tokens"][0] == { - "token": response.json["data"]["refreshDeviceApiToken"]["token"], + "token": response.json()["data"]["refreshDeviceApiToken"]["token"], "name": "test_token", "date": "2022-01-14 08:31:10.789314", } @@ -205,7 +205,7 @@ def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file): json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): @@ -214,14 +214,16 @@ def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert ( + 
response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + ) token = ( Mnemonic(language="english") - .to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == token @@ -249,7 +251,7 @@ def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): }, ) assert response.status_code == 200 - assert response.json["data"] is None + assert response.json()["data"] is None def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): @@ -258,14 +260,16 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + ) token = ( Mnemonic(language="english") - .to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == token @@ -274,10 +278,10 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): json={"query": INVALIDATE_NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is 
not None - assert response.json["data"]["invalidateNewDeviceApiKey"]["success"] is True - assert response.json["data"]["invalidateNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["invalidateNewDeviceApiKey"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["invalidateNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["invalidateNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["invalidateNewDeviceApiKey"]["code"] == 200 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -299,11 +303,11 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - mnemonic_key = response.json["data"]["getNewDeviceApiKey"]["key"] + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] assert mnemonic_key.split(" ").__len__() == 12 key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key @@ -320,11 +324,13 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is True - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert 
response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 - token = response.json["data"]["authorizeWithNewDeviceApiKey"]["token"] + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] assert read_json(tokens_file)["tokens"][2]["token"] == token assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" @@ -343,10 +349,12 @@ def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -356,11 +364,11 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - mnemonic_key = response.json["data"]["getNewDeviceApiKey"]["key"] + assert response.json().get("data") 
is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] assert mnemonic_key.split(" ").__len__() == 12 key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key @@ -377,13 +385,15 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is True - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 assert ( read_json(tokens_file)["tokens"][2]["token"] - == response.json["data"]["authorizeWithNewDeviceApiKey"]["token"] + == response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] ) assert read_json(tokens_file)["tokens"][2]["name"] == "new_token" @@ -400,10 +410,12 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert response.json().get("data") is not None + assert 
response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 assert read_json(tokens_file)["tokens"].__len__() == 3 @@ -415,14 +427,16 @@ def test_graphql_get_and_authorize_key_after_12_minutes( json={"query": NEW_DEVICE_KEY_MUTATION}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["getNewDeviceApiKey"]["code"] == 200 - assert response.json["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + ) key = ( Mnemonic(language="english") - .to_entropy(response.json["data"]["getNewDeviceApiKey"]["key"]) + .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) .hex() ) assert read_json(tokens_file)["new_device"]["token"] == key @@ -446,10 +460,12 @@ def test_graphql_get_and_authorize_key_after_12_minutes( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["success"] is False - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - assert response.json["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert ( + 
response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 def test_graphql_authorize_without_token(client, tokens_file): @@ -465,4 +481,4 @@ def test_graphql_authorize_without_token(client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index be0fdff..c5e229e 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -37,22 +37,22 @@ def test_graphql_recovery_key_status_unauthorized(client, tokens_file): json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_file): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is False - assert response.json["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False + assert 
response.json()["data"]["api"]["recoveryKey"]["creationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None API_RECOVERY_KEY_GENERATE_MUTATION = """ @@ -86,18 +86,19 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None assert ( - response.json["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() == 18 + response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() + == 18 ) assert read_json(tokens_file)["recovery_token"] is not None time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None - key = response.json["data"]["getNewRecoveryApiKey"]["key"] + key = response.json()["data"]["getNewRecoveryApiKey"]["key"] assert ( datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) @@ -105,20 +106,20 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): ) # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert 
response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"][ + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"][ "creationDate" ] == time_generated.replace("Z", "") - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None # Try to use token response = client.post( @@ -134,13 +135,13 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["useRecoveryApiKey"]["token"] == 
read_json(tokens_file)["tokens"][2]["token"] ) assert read_json(tokens_file)["tokens"][2]["name"] == "new_test_token" @@ -159,13 +160,13 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] ) assert read_json(tokens_file)["tokens"][3]["name"] == "new_test_token2" @@ -188,17 +189,18 @@ def test_graphql_generate_recovery_key_with_expiration_date( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None 
assert ( - response.json["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() == 18 + response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() + == 18 ) assert read_json(tokens_file)["recovery_token"] is not None - key = response.json["data"]["getNewRecoveryApiKey"]["key"] + key = response.json()["data"]["getNewRecoveryApiKey"]["key"] assert read_json(tokens_file)["recovery_token"]["expiration"] == expiration_date_str assert read_json(tokens_file)["recovery_token"]["token"] == mnemonic_to_hex(key) @@ -211,23 +213,23 @@ def test_graphql_generate_recovery_key_with_expiration_date( ) # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"][ + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"][ "creationDate" ] == time_generated.replace("Z", "") assert ( - response.json["data"]["api"]["recoveryKey"]["expirationDate"] + response.json()["data"]["api"]["recoveryKey"]["expirationDate"] == expiration_date_str ) - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None # Try to use token response = authorized_client.post( @@ -243,13 +245,13 @@ def test_graphql_generate_recovery_key_with_expiration_date( }, ) assert response.status_code == 200 - assert 
response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][2]["token"] ) @@ -267,13 +269,13 @@ def test_graphql_generate_recovery_key_with_expiration_date( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None assert ( - response.json["data"]["useRecoveryApiKey"]["token"] + response.json()["data"]["useRecoveryApiKey"]["token"] == read_json(tokens_file)["tokens"][3]["token"] ) @@ -296,30 +298,32 @@ def test_graphql_generate_recovery_key_with_expiration_date( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert 
response.json["data"]["useRecoveryApiKey"]["success"] is False - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 404 - assert response.json["data"]["useRecoveryApiKey"]["token"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is False + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is None assert read_json(tokens_file)["tokens"] == new_data["tokens"] # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] == time_generated + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False assert ( - response.json["data"]["api"]["recoveryKey"]["expirationDate"] + response.json()["data"]["api"]["recoveryKey"]["creationDate"] == time_generated + ) + assert ( + response.json()["data"]["api"]["recoveryKey"]["expirationDate"] == new_data["recovery_token"]["expiration"] ) - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None def test_graphql_generate_recovery_key_with_expiration_in_the_past( @@ -340,11 
+344,11 @@ def test_graphql_generate_recovery_key_with_expiration_in_the_past( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None assert "recovery_token" not in read_json(tokens_file) @@ -366,7 +370,7 @@ def test_graphql_generate_recovery_key_with_invalid_time_format( }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert "recovery_token" not in read_json(tokens_file) @@ -388,31 +392,31 @@ def test_graphql_generate_recovery_key_with_limited_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None - mnemonic_key = 
response.json["data"]["getNewRecoveryApiKey"]["key"] + mnemonic_key = response.json()["data"]["getNewRecoveryApiKey"]["key"] key = mnemonic_to_hex(mnemonic_key) assert read_json(tokens_file)["recovery_token"]["token"] == key assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 2 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 2 # Try to use token response = authorized_client.post( @@ -428,25 +432,25 @@ def test_graphql_generate_recovery_key_with_limited_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert 
response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 1 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 1 # Try to use token response = authorized_client.post( @@ -462,25 +466,25 @@ def test_graphql_generate_recovery_key_with_limited_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is True - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json["data"]["useRecoveryApiKey"]["code"] == 200 - assert 
response.json["data"]["useRecoveryApiKey"]["token"] is not None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None # Try to get token status - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["api"]["recoveryKey"] is not None - assert response.json["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json["data"]["api"]["recoveryKey"]["usesLeft"] == 0 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 0 # Try to use token response = authorized_client.post( @@ -496,11 +500,11 @@ def test_graphql_generate_recovery_key_with_limited_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["useRecoveryApiKey"]["success"] is False - assert response.json["data"]["useRecoveryApiKey"]["message"] is not None - assert 
response.json["data"]["useRecoveryApiKey"]["code"] == 404 - assert response.json["data"]["useRecoveryApiKey"]["token"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is False + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is None def test_graphql_generate_recovery_key_with_negative_uses( @@ -519,11 +523,11 @@ def test_graphql_generate_recovery_key_with_negative_uses( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): @@ -540,8 +544,8 @@ def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json["data"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json().get("data") is not None + assert 
response.json()["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None diff --git a/tests/test_graphql/test_api_version.py b/tests/test_graphql/test_api_version.py index 8f76035..64bcc36 100644 --- a/tests/test_graphql/test_api_version.py +++ b/tests/test_graphql/test_api_version.py @@ -8,18 +8,18 @@ API_VERSION_QUERY = "version" def test_graphql_get_api_version(authorized_client): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={"query": generate_api_query([API_VERSION_QUERY])}, ) assert response.status_code == 200 - assert "version" in response.get_json()["data"]["api"] + assert "version" in response.json()["data"]["api"] def test_graphql_api_version_unauthorized(client): - response = client.get( + response = client.post( "/graphql", json={"query": generate_api_query([API_VERSION_QUERY])}, ) assert response.status_code == 200 - assert "version" in response.get_json()["data"]["api"] + assert "version" in response.json()["data"]["api"] diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 7b48c83..4831692 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -71,7 +71,7 @@ def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_popen): @@ -88,14 +88,14 @@ def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 
201 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is True - assert response.json["data"]["addSshKey"]["user"]["username"] == "user1" - assert response.json["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["addSshKey"]["user"]["username"] == "user1" + assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-rsa KEY user1@pc", "ssh-rsa KEY test_key@pc", ] @@ -115,14 +115,14 @@ def test_graphql_add_root_ssh_key(authorized_client, some_users, mock_subprocess }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 201 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is True - assert response.json["data"]["addSshKey"]["user"]["username"] == "root" - assert response.json["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["addSshKey"]["user"]["username"] == "root" + assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-ed25519 KEY test@pc", "ssh-rsa KEY test_key@pc", ] @@ -142,14 +142,14 @@ def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 201 - assert response.json["data"]["addSshKey"]["message"] is not 
None - assert response.json["data"]["addSshKey"]["success"] is True + assert response.json()["data"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is True - assert response.json["data"]["addSshKey"]["user"]["username"] == "tester" - assert response.json["data"]["addSshKey"]["user"]["sshKeys"] == [ + assert response.json()["data"]["addSshKey"]["user"]["username"] == "tester" + assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ "ssh-rsa KEY test@pc", "ssh-rsa KEY test_key@pc", ] @@ -169,11 +169,11 @@ def test_graphql_add_bad_ssh_key(authorized_client, some_users, mock_subprocess_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 400 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is False + assert response.json()["data"]["addSshKey"]["code"] == 400 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is False def test_graphql_add_ssh_key_nonexistent_user( @@ -192,11 +192,11 @@ def test_graphql_add_ssh_key_nonexistent_user( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["addSshKey"]["code"] == 404 - assert response.json["data"]["addSshKey"]["message"] is not None - assert response.json["data"]["addSshKey"]["success"] is False + assert response.json()["data"]["addSshKey"]["code"] == 404 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is False API_REMOVE_SSH_KEY_MUTATION = """ @@ -228,7 +228,7 @@ def test_graphql_remove_ssh_key_unauthorized(client, some_users, 
mock_subprocess }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_popen): @@ -245,14 +245,14 @@ def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_p }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 200 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is True - assert response.json["data"]["removeSshKey"]["user"]["username"] == "user1" - assert response.json["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert response.json()["data"]["removeSshKey"]["user"]["username"] == "user1" + assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_root_ssh_key( @@ -271,14 +271,14 @@ def test_graphql_remove_root_ssh_key( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 200 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is True - assert response.json["data"]["removeSshKey"]["user"]["username"] == "root" - assert response.json["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert 
response.json()["data"]["removeSshKey"]["user"]["username"] == "root" + assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_main_ssh_key( @@ -297,14 +297,14 @@ def test_graphql_remove_main_ssh_key( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 200 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is True + assert response.json()["data"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is True - assert response.json["data"]["removeSshKey"]["user"]["username"] == "tester" - assert response.json["data"]["removeSshKey"]["user"]["sshKeys"] == [] + assert response.json()["data"]["removeSshKey"]["user"]["username"] == "tester" + assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] def test_graphql_remove_nonexistent_ssh_key( @@ -323,11 +323,11 @@ def test_graphql_remove_nonexistent_ssh_key( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["removeSshKey"]["code"] == 404 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is False + assert response.json()["data"]["removeSshKey"]["code"] == 404 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is False def test_graphql_remove_ssh_key_nonexistent_user( @@ -346,8 +346,8 @@ def test_graphql_remove_ssh_key_nonexistent_user( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - 
assert response.json["data"]["removeSshKey"]["code"] == 404 - assert response.json["data"]["removeSshKey"]["message"] is not None - assert response.json["data"]["removeSshKey"]["success"] is False + assert response.json()["data"]["removeSshKey"]["code"] == 404 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is False diff --git a/tests/test_graphql/_test_system.py b/tests/test_graphql/test_system.py similarity index 65% rename from tests/test_graphql/_test_system.py rename to tests/test_graphql/test_system.py index 476846a..a021a16 100644 --- a/tests/test_graphql/_test_system.py +++ b/tests/test_graphql/test_system.py @@ -124,6 +124,7 @@ def mock_dkim_key(mocker): autospec=True, return_value="I am a DKIM key", ) + return mock API_PYTHON_VERSION_INFO = """ @@ -137,27 +138,27 @@ def test_graphql_get_python_version_wrong_auth( wrong_auth_client, mock_subprocess_check_output ): """Test wrong auth""" - response = wrong_auth_client.get( + response = wrong_auth_client.post( "/graphql", json={ "query": generate_system_query([API_PYTHON_VERSION_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_python_version(authorized_client, mock_subprocess_check_output): """Test get python version""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_PYTHON_VERSION_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["system"]["info"]["pythonVersion"] == "Testing Linux" + assert response.json().get("data") is not None + assert response.json()["data"]["system"]["info"]["pythonVersion"] == "Testing Linux" assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["python", "-V"] @@ -173,7 +174,7 @@ def 
test_graphql_get_system_version_unauthorized( wrong_auth_client, mock_subprocess_check_output ): """Test wrong auth""" - response = wrong_auth_client.get( + response = wrong_auth_client.post( "/graphql", json={ "query": generate_system_query([API_SYSTEM_VERSION_INFO]), @@ -181,14 +182,14 @@ def test_graphql_get_system_version_unauthorized( ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_check_output.call_count == 0 def test_graphql_get_system_version(authorized_client, mock_subprocess_check_output): """Test get system version""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_SYSTEM_VERSION_INFO]), @@ -196,9 +197,9 @@ def test_graphql_get_system_version(authorized_client, mock_subprocess_check_out ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["system"]["info"]["systemVersion"] == "Testing Linux" + assert response.json()["data"]["system"]["info"]["systemVersion"] == "Testing Linux" assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"] @@ -209,7 +210,7 @@ domainInfo { hostname provider requiredDnsRecords { - type + recordType name content ttl @@ -219,14 +220,16 @@ domainInfo { """ -def dns_record(type="A", name="test.tld", content=None, ttl=3600, priority=None): +def dns_record( + record_type="A", name="test-domain.tld", content=None, ttl=3600, priority=None +): if content is None: - if type == "A": + if record_type == "A": content = "157.90.247.192" - elif type == "AAAA": + elif record_type == "AAAA": content = "fe80::9400:ff:fef1:34ae" return { - "type": type, + "recordType": record_type, "name": name, "content": content, "ttl": ttl, @@ -237,7 +240,7 @@ def dns_record(type="A", name="test.tld", 
content=None, ttl=3600, priority=None) def is_dns_record_in_array(records, dns_record) -> bool: for record in records: if ( - record["type"] == dns_record["type"] + record["recordType"] == dns_record["recordType"] and record["name"] == dns_record["name"] and record["content"] == dns_record["content"] and record["ttl"] == dns_record["ttl"] @@ -248,66 +251,73 @@ def is_dns_record_in_array(records, dns_record) -> bool: def test_graphql_get_domain( - authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on + authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on, mock_dkim_key ): """Test get domain""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_GET_DOMAIN_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["system"]["domainInfo"]["domain"] == "test.tld" - assert response.json["data"]["system"]["domainInfo"]["hostname"] == "test-instance" - assert response.json["data"]["system"]["domainInfo"]["provider"] == "HETZNER" - dns_records = response.json["data"]["system"]["domainInfo"]["requiredDnsRecords"] + assert response.json().get("data") is not None + assert response.json()["data"]["system"]["domainInfo"]["domain"] == "test.tld" + assert ( + response.json()["data"]["system"]["domainInfo"]["hostname"] == "test-instance" + ) + assert response.json()["data"]["system"]["domainInfo"]["provider"] == "CLOUDFLARE" + dns_records = response.json()["data"]["system"]["domainInfo"]["requiredDnsRecords"] assert is_dns_record_in_array(dns_records, dns_record()) - assert is_dns_record_in_array(dns_records, dns_record(type="AAAA")) - assert is_dns_record_in_array(dns_records, dns_record(name="api.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(record_type="AAAA")) + assert is_dns_record_in_array(dns_records, dns_record(name="api")) assert is_dns_record_in_array( - dns_records, 
dns_record(name="api.test.tld", type="AAAA") + dns_records, dns_record(name="api", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="cloud.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="cloud")) assert is_dns_record_in_array( - dns_records, dns_record(name="cloud.test.tld", type="AAAA") + dns_records, dns_record(name="cloud", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="git.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="git")) assert is_dns_record_in_array( - dns_records, dns_record(name="git.test.tld", type="AAAA") + dns_records, dns_record(name="git", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="meet.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="meet")) assert is_dns_record_in_array( - dns_records, dns_record(name="meet.test.tld", type="AAAA") + dns_records, dns_record(name="meet", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="password.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="password")) assert is_dns_record_in_array( - dns_records, dns_record(name="password.test.tld", type="AAAA") + dns_records, dns_record(name="password", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="social.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="social")) assert is_dns_record_in_array( - dns_records, dns_record(name="social.test.tld", type="AAAA") + dns_records, dns_record(name="social", record_type="AAAA") ) - assert is_dns_record_in_array(dns_records, dns_record(name="vpn.test.tld")) + assert is_dns_record_in_array(dns_records, dns_record(name="vpn")) assert is_dns_record_in_array( - dns_records, dns_record(name="vpn.test.tld", type="AAAA") - ) - assert is_dns_record_in_array( - dns_records, - dns_record(name="test.tld", type="MX", content="test.tld", 
priority=10), + dns_records, dns_record(name="vpn", record_type="AAAA") ) assert is_dns_record_in_array( dns_records, dns_record( - name="_dmarc.test.tld", type="TXT", content="v=DMARC1; p=none", ttl=18000 + name="test-domain.tld", + record_type="MX", + content="test-domain.tld", + priority=10, ), ) assert is_dns_record_in_array( dns_records, dns_record( - name="test.tld", - type="TXT", + name="_dmarc", record_type="TXT", content="v=DMARC1; p=none", ttl=18000 + ), + ) + assert is_dns_record_in_array( + dns_records, + dns_record( + name="test-domain.tld", + record_type="TXT", content="v=spf1 a mx ip4:157.90.247.192 -all", ttl=18000, ), @@ -315,8 +325,8 @@ def test_graphql_get_domain( assert is_dns_record_in_array( dns_records, dns_record( - name="selector._domainkey.test.tld", - type="TXT", + name="selector._domainkey", + record_type="TXT", content="I am a DKIM key", ttl=18000, ), @@ -332,40 +342,42 @@ settings { def test_graphql_get_timezone_unauthorized(client, turned_on): """Test get timezone without auth""" - response = client.get( + response = client.post( "/graphql", json={ "query": generate_system_query([API_GET_TIMEZONE]), }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_timezone(authorized_client, turned_on): """Test get timezone""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_system_query([API_GET_TIMEZONE]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["system"]["settings"]["timezone"] == "Europe/Moscow" + assert response.json().get("data") is not None + assert response.json()["data"]["system"]["settings"]["timezone"] == "Europe/Moscow" def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): """Test get timezone when none is defined in config""" - response = authorized_client.get( + response = 
authorized_client.post( "/graphql", json={ "query": generate_system_query([API_GET_TIMEZONE]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["system"]["settings"]["timezone"] == "Europe/Uzhgorod" + assert response.json().get("data") is not None + assert ( + response.json()["data"]["system"]["settings"]["timezone"] == "Europe/Uzhgorod" + ) API_CHANGE_TIMEZONE_MUTATION = """ @@ -392,7 +404,7 @@ def test_graphql_change_timezone_unauthorized(client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_change_timezone(authorized_client, turned_on): @@ -407,11 +419,11 @@ def test_graphql_change_timezone(authorized_client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeTimezone"]["success"] is True - assert response.json["data"]["changeTimezone"]["message"] is not None - assert response.json["data"]["changeTimezone"]["code"] == 200 - assert response.json["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is True + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 200 + assert response.json()["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Helsinki" @@ -427,11 +439,11 @@ def test_graphql_change_timezone_on_undefined(authorized_client, undefined_confi }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeTimezone"]["success"] is True - assert response.json["data"]["changeTimezone"]["message"] is not None - assert response.json["data"]["changeTimezone"]["code"] == 
200 - assert response.json["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is True + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 200 + assert response.json()["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" assert ( read_json(undefined_config / "undefined.json")["timezone"] == "Europe/Helsinki" ) @@ -449,11 +461,11 @@ def test_graphql_change_timezone_without_timezone(authorized_client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeTimezone"]["success"] is False - assert response.json["data"]["changeTimezone"]["message"] is not None - assert response.json["data"]["changeTimezone"]["code"] == 400 - assert response.json["data"]["changeTimezone"]["timezone"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is False + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 400 + assert response.json()["data"]["changeTimezone"]["timezone"] is None assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" @@ -469,18 +481,18 @@ def test_graphql_change_timezone_with_invalid_timezone(authorized_client, turned }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeTimezone"]["success"] is False - assert response.json["data"]["changeTimezone"]["message"] is not None - assert response.json["data"]["changeTimezone"]["code"] == 400 - assert response.json["data"]["changeTimezone"]["timezone"] is None + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is False + assert 
response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 400 + assert response.json()["data"]["changeTimezone"]["timezone"] is None assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" API_GET_AUTO_UPGRADE_SETTINGS_QUERY = """ settings { autoUpgrade { - enableAutoUpgrade + enable allowReboot } } @@ -489,72 +501,90 @@ settings { def test_graphql_get_auto_upgrade_unauthorized(client, turned_on): """Test get auto upgrade settings without auth""" - response = client.get( + response = client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_auto_upgrade(authorized_client, turned_on): """Test get auto upgrade settings""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True - assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is True + assert response.json().get("data") is not None + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True + ) + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] + is True + ) def test_graphql_get_auto_upgrade_on_undefined(authorized_client, undefined_config): """Test get auto upgrade settings when none is defined in config""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": 
generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True - assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert response.json().get("data") is not None + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True + ) + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] + is False + ) def test_graphql_get_auto_upgrade_without_vlaues(authorized_client, no_values): """Test get auto upgrade settings without values""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is True - assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is False + assert response.json().get("data") is not None + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True + ) + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] + is False + ) def test_graphql_get_auto_upgrade_turned_off(authorized_client, turned_off): """Test get auto upgrade settings when turned off""" - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ - "query": API_GET_AUTO_UPGRADE_SETTINGS_QUERY, + "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None assert ( - response.json["data"]["settings"]["autoUpgrade"]["enableAutoUpgrade"] is False + 
response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is False + ) + assert ( + response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"] + is False ) - assert response.json["data"]["settings"]["autoUpgrade"]["allowReboot"] is False API_CHANGE_AUTO_UPGRADE_SETTINGS = """ @@ -585,7 +615,7 @@ def test_graphql_change_auto_upgrade_unauthorized(client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_change_auto_upgrade(authorized_client, turned_on): @@ -603,14 +633,15 @@ def test_graphql_change_auto_upgrade(authorized_client, turned_on): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["enable"] is False assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["allowReboot"] is True @@ -630,14 +661,15 @@ def test_graphql_change_auto_upgrade_on_undefined(authorized_client, undefined_c }, ) assert response.status_code == 200 - assert response.json.get("data") is not 
None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert ( read_json(undefined_config / "undefined.json")["autoUpgrade"]["enable"] is False ) @@ -662,14 +694,15 @@ def test_graphql_change_auto_upgrade_without_vlaues(authorized_client, no_values }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is True + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is True ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert 
response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(no_values / "no_values.json")["autoUpgrade"]["enable"] is True assert read_json(no_values / "no_values.json")["autoUpgrade"]["allowReboot"] is True @@ -689,14 +722,15 @@ def test_graphql_change_auto_upgrade_turned_off(authorized_client, turned_off): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is True + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is True ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True @@ -717,14 +751,15 @@ def test_grphql_change_auto_upgrade_without_enable(authorized_client, turned_off }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert 
response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True @@ -747,14 +782,15 @@ def test_graphql_change_auto_upgrade_without_allow_reboot( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is True + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is True ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False @@ -773,14 +809,15 @@ def 
test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["success"] is True - assert response.json["data"]["changeAutoUpgradeSettings"]["message"] is not None - assert response.json["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 assert ( - response.json["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] is False + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False ) - assert response.json["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False assert ( read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False @@ -788,7 +825,7 @@ def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_ API_PULL_SYSTEM_CONFIGURATION_MUTATION = """ -mutation testPullSystemConfiguration() { +mutation testPullSystemConfiguration { pullRepositoryChanges { success message @@ -807,7 +844,7 @@ def test_graphql_pull_system_configuration_unauthorized(client, mock_subprocess_ ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -823,10 +860,10 @@ def test_graphql_pull_system_configuration( ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["pullRepositoryChanges"]["success"] is True - assert 
response.json["data"]["pullRepositoryChanges"]["message"] is not None - assert response.json["data"]["pullRepositoryChanges"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["pullRepositoryChanges"]["success"] is True + assert response.json()["data"]["pullRepositoryChanges"]["message"] is not None + assert response.json()["data"]["pullRepositoryChanges"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == ["git", "pull"] @@ -848,10 +885,10 @@ def test_graphql_pull_system_broken_repo( ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["pullRepositoryChanges"]["success"] is False - assert response.json["data"]["pullRepositoryChanges"]["message"] is not None - assert response.json["data"]["pullRepositoryChanges"]["code"] == 500 + assert response.json().get("data") is not None + assert response.json()["data"]["pullRepositoryChanges"]["success"] is False + assert response.json()["data"]["pullRepositoryChanges"]["message"] is not None + assert response.json()["data"]["pullRepositoryChanges"]["code"] == 500 assert mock_broken_service.call_count == 1 assert mock_os_chdir.call_count == 2 diff --git a/tests/test_graphql/test_system/turned_on.json b/tests/test_graphql/test_system/turned_on.json index 337e47f..821875b 100644 --- a/tests/test_graphql/test_system/turned_on.json +++ b/tests/test_graphql/test_system/turned_on.json @@ -33,7 +33,7 @@ }, "username": "tester", "gitea": { - "enable": false + "enable": true }, "ocserv": { "enable": true @@ -41,6 +41,9 @@ "pleroma": { "enable": true }, + "jitsi": { + "enable": true + }, "autoUpgrade": { "enable": true, "allowReboot": true @@ -49,4 +52,4 @@ "sshKeys": [ "ssh-rsa KEY test@pc" ] -} \ No newline at end of file +} diff --git a/tests/test_graphql/test_system_nixos_tasks.py b/tests/test_graphql/test_system_nixos_tasks.py index 601c353..3e823b6 100644 --- 
a/tests/test_graphql/test_system_nixos_tasks.py +++ b/tests/test_graphql/test_system_nixos_tasks.py @@ -72,7 +72,7 @@ def test_graphql_system_rebuild_unauthorized(client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -85,10 +85,10 @@ def test_graphql_system_rebuild(authorized_client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["runSystemRebuild"]["success"] is True - assert response.json["data"]["runSystemRebuild"]["message"] is not None - assert response.json["data"]["runSystemRebuild"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["runSystemRebuild"]["success"] is True + assert response.json()["data"]["runSystemRebuild"]["message"] is not None + assert response.json()["data"]["runSystemRebuild"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -117,7 +117,7 @@ def test_graphql_system_upgrade_unauthorized(client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -130,10 +130,10 @@ def test_graphql_system_upgrade(authorized_client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["runSystemUpgrade"]["success"] is True - assert response.json["data"]["runSystemUpgrade"]["message"] is not None - assert response.json["data"]["runSystemUpgrade"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["runSystemUpgrade"]["success"] is True + assert response.json()["data"]["runSystemUpgrade"]["message"] is not None + 
assert response.json()["data"]["runSystemUpgrade"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -162,7 +162,7 @@ def test_graphql_system_rollback_unauthorized(client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -175,10 +175,10 @@ def test_graphql_system_rollback(authorized_client, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert response.json["data"]["runSystemRollback"]["success"] is True - assert response.json["data"]["runSystemRollback"]["message"] is not None - assert response.json["data"]["runSystemRollback"]["code"] == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["runSystemRollback"]["success"] is True + assert response.json()["data"]["runSystemRollback"]["message"] is not None + assert response.json()["data"]["runSystemRollback"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == [ "systemctl", @@ -207,7 +207,7 @@ def test_graphql_reboot_system_unauthorized(client, mock_subprocess_popen): ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None assert mock_subprocess_popen.call_count == 0 @@ -221,11 +221,11 @@ def test_graphql_reboot_system(authorized_client, mock_subprocess_popen): ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["rebootSystem"]["success"] is True - assert response.json["data"]["rebootSystem"]["message"] is not None - assert response.json["data"]["rebootSystem"]["code"] == 200 + assert response.json()["data"]["rebootSystem"]["success"] is True + assert 
response.json()["data"]["rebootSystem"]["message"] is not None + assert response.json()["data"]["rebootSystem"]["code"] == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == ["reboot"] diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index afae1da..c36dcb2 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -119,53 +119,53 @@ allUsers { def test_graphql_get_users_unauthorized(client, some_users, mock_subprocess_popen): """Test wrong auth""" - response = client.get( + response = client.post( "/graphql", json={ "query": generate_users_query([API_USERS_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_some_users(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_users_query([API_USERS_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None - assert len(response.json["data"]["users"]["allUsers"]) == 4 - assert response.json["data"]["users"]["allUsers"][0]["username"] == "user1" - assert response.json["data"]["users"]["allUsers"][0]["sshKeys"] == [ + assert response.json().get("data") is not None + assert len(response.json()["data"]["users"]["allUsers"]) == 4 + assert response.json()["data"]["users"]["allUsers"][0]["username"] == "user1" + assert response.json()["data"]["users"]["allUsers"][0]["sshKeys"] == [ "ssh-rsa KEY user1@pc" ] - assert response.json["data"]["users"]["allUsers"][1]["username"] == "user2" - assert response.json["data"]["users"]["allUsers"][1]["sshKeys"] == [] + assert response.json()["data"]["users"]["allUsers"][1]["username"] == "user2" + assert response.json()["data"]["users"]["allUsers"][1]["sshKeys"] == [] - assert response.json["data"]["users"]["allUsers"][3]["username"] == 
"tester" - assert response.json["data"]["users"]["allUsers"][3]["sshKeys"] == [ + assert response.json()["data"]["users"]["allUsers"][3]["username"] == "tester" + assert response.json()["data"]["users"]["allUsers"][3]["sshKeys"] == [ "ssh-rsa KEY test@pc" ] def test_graphql_get_no_users(authorized_client, no_users, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": generate_users_query([API_USERS_INFO]), }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["allUsers"]) == 1 - assert response.json["data"]["users"]["allUsers"][0]["username"] == "tester" - assert response.json["data"]["users"]["allUsers"][0]["sshKeys"] == [ + assert len(response.json()["data"]["users"]["allUsers"]) == 1 + assert response.json()["data"]["users"]["allUsers"][0]["username"] == "tester" + assert response.json()["data"]["users"]["allUsers"][0]["sshKeys"] == [ "ssh-rsa KEY test@pc" ] @@ -183,7 +183,7 @@ query TestUsers($username: String!) 
{ def test_graphql_get_one_user_unauthorized(client, one_user, mock_subprocess_popen): - response = client.get( + response = client.post( "/graphql", json={ "query": API_GET_USERS, @@ -193,12 +193,12 @@ def test_graphql_get_one_user_unauthorized(client, one_user, mock_subprocess_pop }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -208,17 +208,17 @@ def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["getUser"]) == 2 - assert response.json["data"]["users"]["getUser"]["username"] == "user1" - assert response.json["data"]["users"]["getUser"]["sshKeys"] == [ + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "user1" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [ "ssh-rsa KEY user1@pc" ] def test_graphql_get_some_user(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -228,15 +228,15 @@ def test_graphql_get_some_user(authorized_client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["getUser"]) == 2 - assert response.json["data"]["users"]["getUser"]["username"] == "user2" - assert response.json["data"]["users"]["getUser"]["sshKeys"] == [] + assert len(response.json()["data"]["users"]["getUser"]) == 2 + 
assert response.json()["data"]["users"]["getUser"]["username"] == "user2" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [] def test_graphql_get_root_user(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -246,17 +246,17 @@ def test_graphql_get_root_user(authorized_client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["getUser"]) == 2 - assert response.json["data"]["users"]["getUser"]["username"] == "root" - assert response.json["data"]["users"]["getUser"]["sshKeys"] == [ + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "root" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [ "ssh-ed25519 KEY test@pc" ] def test_graphql_get_main_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -266,11 +266,11 @@ def test_graphql_get_main_user(authorized_client, one_user, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert len(response.json["data"]["users"]["getUser"]) == 2 - assert response.json["data"]["users"]["getUser"]["username"] == "tester" - assert response.json["data"]["users"]["getUser"]["sshKeys"] == [ + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "tester" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [ "ssh-rsa KEY test@pc" ] @@ -278,7 +278,7 @@ def test_graphql_get_main_user(authorized_client, one_user, mock_subprocess_pope 
def test_graphql_get_nonexistent_user( authorized_client, one_user, mock_subprocess_popen ): - response = authorized_client.get( + response = authorized_client.post( "/graphql", json={ "query": API_GET_USERS, @@ -288,9 +288,9 @@ def test_graphql_get_nonexistent_user( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["users"]["getUser"] is None + assert response.json()["data"]["users"]["getUser"] is None API_CREATE_USERS_MUTATION = """ @@ -322,7 +322,7 @@ def test_graphql_add_user_unauthorize(client, one_user, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): @@ -339,14 +339,14 @@ def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 201 - assert response.json["data"]["createUser"]["success"] is True + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 201 + assert response.json()["data"]["createUser"]["success"] is True - assert response.json["data"]["createUser"]["user"]["username"] == "user2" - assert response.json["data"]["createUser"]["user"]["sshKeys"] == [] + assert response.json()["data"]["createUser"]["user"]["username"] == "user2" + assert response.json()["data"]["createUser"]["user"]["sshKeys"] == [] def test_graphql_add_undefined_settings( @@ -365,14 +365,14 @@ def test_graphql_add_undefined_settings( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert 
response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 201 - assert response.json["data"]["createUser"]["success"] is True + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 201 + assert response.json()["data"]["createUser"]["success"] is True - assert response.json["data"]["createUser"]["user"]["username"] == "user2" - assert response.json["data"]["createUser"]["user"]["sshKeys"] == [] + assert response.json()["data"]["createUser"]["user"]["username"] == "user2" + assert response.json()["data"]["createUser"]["user"]["sshKeys"] == [] def test_graphql_add_without_password( @@ -391,13 +391,13 @@ def test_graphql_add_without_password( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 400 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_popen): @@ -414,13 +414,13 @@ def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_p }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 400 - assert response.json["data"]["createUser"]["success"] is False + 
assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None @pytest.mark.parametrize("username", invalid_usernames) @@ -440,13 +440,13 @@ def test_graphql_add_system_username( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 409 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 409 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_popen): @@ -463,15 +463,15 @@ def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 409 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 409 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"]["username"] == "user1" + assert response.json()["data"]["createUser"]["user"]["username"] == "user1" assert ( - 
response.json["data"]["createUser"]["user"]["sshKeys"][0] + response.json()["data"]["createUser"]["user"]["sshKeys"][0] == "ssh-rsa KEY user1@pc" ) @@ -490,15 +490,15 @@ def test_graphql_add_main_user(authorized_client, one_user, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 409 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 409 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"]["username"] == "tester" + assert response.json()["data"]["createUser"]["user"]["username"] == "tester" assert ( - response.json["data"]["createUser"]["user"]["sshKeys"][0] + response.json()["data"]["createUser"]["user"]["sshKeys"][0] == "ssh-rsa KEY test@pc" ) @@ -517,13 +517,13 @@ def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_ }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 400 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None @pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "^-^"]) @@ -543,13 +543,13 @@ def test_graphql_add_invalid_username( }, ) assert 
response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["createUser"]["message"] is not None - assert response.json["data"]["createUser"]["code"] == 400 - assert response.json["data"]["createUser"]["success"] is False + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False - assert response.json["data"]["createUser"]["user"] is None + assert response.json()["data"]["createUser"]["user"] is None API_DELETE_USER_MUTATION = """ @@ -572,7 +572,7 @@ def test_graphql_delete_user_unauthorized(client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_popen): @@ -584,11 +584,11 @@ def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["deleteUser"]["code"] == 200 - assert response.json["data"]["deleteUser"]["message"] is not None - assert response.json["data"]["deleteUser"]["success"] is True + assert response.json()["data"]["deleteUser"]["code"] == 200 + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is True @pytest.mark.parametrize("username", ["", "def"]) @@ -603,11 +603,11 @@ def test_graphql_delete_nonexistent_users( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["deleteUser"]["code"] == 404 - assert response.json["data"]["deleteUser"]["message"] is not None - 
assert response.json["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["deleteUser"]["code"] == 404 + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is False @pytest.mark.parametrize("username", invalid_usernames) @@ -622,14 +622,14 @@ def test_graphql_delete_system_users( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None assert ( - response.json["data"]["deleteUser"]["code"] == 404 - or response.json["data"]["deleteUser"]["code"] == 400 + response.json()["data"]["deleteUser"]["code"] == 404 + or response.json()["data"]["deleteUser"]["code"] == 400 ) - assert response.json["data"]["deleteUser"]["message"] is not None - assert response.json["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is False def test_graphql_delete_main_user(authorized_client, some_users, mock_subprocess_popen): @@ -641,11 +641,11 @@ def test_graphql_delete_main_user(authorized_client, some_users, mock_subprocess }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["deleteUser"]["code"] == 400 - assert response.json["data"]["deleteUser"]["message"] is not None - assert response.json["data"]["deleteUser"]["success"] is False + assert response.json()["data"]["deleteUser"]["code"] == 400 + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is False API_UPDATE_USER_MUTATION = """ @@ -677,7 +677,7 @@ def test_graphql_update_user_unauthorized(client, some_users, mock_subprocess_po }, ) assert response.status_code == 200 - assert response.json.get("data") is None + assert response.json().get("data") is None 
def test_graphql_update_user(authorized_client, some_users, mock_subprocess_popen): @@ -694,14 +694,14 @@ def test_graphql_update_user(authorized_client, some_users, mock_subprocess_pope }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["updateUser"]["code"] == 200 - assert response.json["data"]["updateUser"]["message"] is not None - assert response.json["data"]["updateUser"]["success"] is True + assert response.json()["data"]["updateUser"]["code"] == 200 + assert response.json()["data"]["updateUser"]["message"] is not None + assert response.json()["data"]["updateUser"]["success"] is True - assert response.json["data"]["updateUser"]["user"]["username"] == "user1" - assert response.json["data"]["updateUser"]["user"]["sshKeys"] == [ + assert response.json()["data"]["updateUser"]["user"]["username"] == "user1" + assert response.json()["data"]["updateUser"]["user"]["sshKeys"] == [ "ssh-rsa KEY user1@pc" ] assert mock_subprocess_popen.call_count == 1 @@ -723,11 +723,11 @@ def test_graphql_update_nonexistent_user( }, ) assert response.status_code == 200 - assert response.json.get("data") is not None + assert response.json().get("data") is not None - assert response.json["data"]["updateUser"]["code"] == 404 - assert response.json["data"]["updateUser"]["message"] is not None - assert response.json["data"]["updateUser"]["success"] is False + assert response.json()["data"]["updateUser"]["code"] == 404 + assert response.json()["data"]["updateUser"]["message"] is not None + assert response.json()["data"]["updateUser"]["success"] is False - assert response.json["data"]["updateUser"]["user"] is None + assert response.json()["data"]["updateUser"]["user"] is None assert mock_subprocess_popen.call_count == 1 diff --git a/tests/test_jobs.py b/tests/test_jobs.py new file mode 100644 index 0000000..87f1386 --- /dev/null +++ b/tests/test_jobs.py @@ -0,0 +1,50 @@ +# 
pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +import json +import pytest + +from selfprivacy_api.utils import WriteUserData, ReadUserData +from selfprivacy_api.jobs import Jobs, JobStatus + + +def test_jobs(authorized_client, jobs_file, shared_datadir): + jobs = Jobs() + assert jobs.get_jobs() == [] + + test_job = jobs.add( + type_id="test", + name="Test job", + description="This is a test job.", + status=JobStatus.CREATED, + status_text="Status text", + progress=0, + ) + + assert jobs.get_jobs() == [test_job] + + jobs.update( + job=test_job, + status=JobStatus.RUNNING, + status_text="Status text", + progress=50, + ) + + assert jobs.get_jobs() == [test_job] + + +@pytest.fixture +def mock_subprocess_run(mocker): + mock = mocker.patch("subprocess.run", autospec=True) + return mock + + +@pytest.fixture +def mock_shutil_move(mocker): + mock = mocker.patch("shutil.move", autospec=True) + return mock + + +@pytest.fixture +def mock_shutil_chown(mocker): + mock = mocker.patch("shutil.chown", autospec=True) + return mock diff --git a/tests/test_network_utils.py b/tests/test_network_utils.py index a7c1511..0662584 100644 --- a/tests/test_network_utils.py +++ b/tests/test_network_utils.py @@ -2,6 +2,7 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=missing-function-docstring +import subprocess import pytest from selfprivacy_api.utils.network import get_ip4, get_ip6 @@ -30,6 +31,28 @@ def ip_process_mock(mocker): return mock +@pytest.fixture +def failed_ip_process_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", + autospec=True, + return_value=FAILED_OUTPUT_STRING, + ) + return mock + + +@pytest.fixture +def failed_subprocess_call(mocker): + mock = mocker.patch( + "subprocess.check_output", + autospec=True, + side_effect=subprocess.CalledProcessError( + returncode=1, cmd=["ip", "addr", "show", "dev", "eth0"] + ), + ) + return mock + + def test_get_ip4(ip_process_mock): """Test get IPv4 
address""" ip4 = get_ip4() @@ -40,3 +63,23 @@ def test_get_ip6(ip_process_mock): """Test get IPv6 address""" ip6 = get_ip6() assert ip6 == "fe80::9400:ff:fef1:34ae" + + +def test_failed_get_ip4(failed_ip_process_mock): + ip4 = get_ip4() + assert ip4 is "" + + +def test_failed_get_ip6(failed_ip_process_mock): + ip6 = get_ip6() + assert ip6 is "" + + +def test_failed_subprocess_get_ip4(failed_subprocess_call): + ip4 = get_ip4() + assert ip4 is "" + + +def test_failed_subprocess_get_ip6(failed_subprocess_call): + ip6 = get_ip6() + assert ip6 is "" diff --git a/tests/test_rest_endpoints/data/jobs.json b/tests/test_rest_endpoints/data/jobs.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tests/test_rest_endpoints/data/jobs.json @@ -0,0 +1 @@ +{} diff --git a/tests/test_rest_endpoints/data/tokens.json b/tests/test_rest_endpoints/data/tokens.json new file mode 100644 index 0000000..9be9d02 --- /dev/null +++ b/tests/test_rest_endpoints/data/tokens.json @@ -0,0 +1,14 @@ +{ + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": "2022-01-14 08:31:10.789314" + }, + { + "token": "TEST_TOKEN2", + "name": "test_token2", + "date": "2022-01-14 08:31:10.789314" + } + ] +} \ No newline at end of file diff --git a/tests/services/data/tokens.json b/tests/test_rest_endpoints/services/data/tokens.json similarity index 100% rename from tests/services/data/tokens.json rename to tests/test_rest_endpoints/services/data/tokens.json diff --git a/tests/services/test_bitwarden.py b/tests/test_rest_endpoints/services/test_bitwarden.py similarity index 100% rename from tests/services/test_bitwarden.py rename to tests/test_rest_endpoints/services/test_bitwarden.py diff --git a/tests/services/test_bitwarden/enable_undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json similarity index 100% rename from tests/services/test_bitwarden/enable_undefined.json rename to 
tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json diff --git a/tests/services/test_bitwarden/turned_off.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json similarity index 100% rename from tests/services/test_bitwarden/turned_off.json rename to tests/test_rest_endpoints/services/test_bitwarden/turned_off.json diff --git a/tests/services/test_bitwarden/turned_on.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json similarity index 100% rename from tests/services/test_bitwarden/turned_on.json rename to tests/test_rest_endpoints/services/test_bitwarden/turned_on.json diff --git a/tests/services/test_bitwarden/undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json similarity index 100% rename from tests/services/test_bitwarden/undefined.json rename to tests/test_rest_endpoints/services/test_bitwarden/undefined.json diff --git a/tests/services/test_gitea.py b/tests/test_rest_endpoints/services/test_gitea.py similarity index 100% rename from tests/services/test_gitea.py rename to tests/test_rest_endpoints/services/test_gitea.py diff --git a/tests/services/test_gitea/enable_undefined.json b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json similarity index 100% rename from tests/services/test_gitea/enable_undefined.json rename to tests/test_rest_endpoints/services/test_gitea/enable_undefined.json diff --git a/tests/services/test_gitea/turned_off.json b/tests/test_rest_endpoints/services/test_gitea/turned_off.json similarity index 100% rename from tests/services/test_gitea/turned_off.json rename to tests/test_rest_endpoints/services/test_gitea/turned_off.json diff --git a/tests/services/test_gitea/turned_on.json b/tests/test_rest_endpoints/services/test_gitea/turned_on.json similarity index 100% rename from tests/services/test_gitea/turned_on.json rename to tests/test_rest_endpoints/services/test_gitea/turned_on.json diff --git 
a/tests/services/test_gitea/undefined.json b/tests/test_rest_endpoints/services/test_gitea/undefined.json similarity index 100% rename from tests/services/test_gitea/undefined.json rename to tests/test_rest_endpoints/services/test_gitea/undefined.json diff --git a/tests/services/test_mailserver.py b/tests/test_rest_endpoints/services/test_mailserver.py similarity index 91% rename from tests/services/test_mailserver.py rename to tests/test_rest_endpoints/services/test_mailserver.py index a9e5f12..36cf615 100644 --- a/tests/services/test_mailserver.py +++ b/tests/test_rest_endpoints/services/test_mailserver.py @@ -25,7 +25,7 @@ class NoFileMock(ProcessMock): def mock_subproccess_popen(mocker): mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) mocker.patch( - "selfprivacy_api.resources.services.mailserver.get_domain", + "selfprivacy_api.rest.services.get_domain", autospec=True, return_value="example.com", ) @@ -37,7 +37,7 @@ def mock_subproccess_popen(mocker): def mock_no_file(mocker): mock = mocker.patch("subprocess.Popen", autospec=True, return_value=NoFileMock) mocker.patch( - "selfprivacy_api.resources.services.mailserver.get_domain", + "selfprivacy_api.rest.services.get_domain", autospec=True, return_value="example.com", ) @@ -67,7 +67,7 @@ def test_dkim_key(authorized_client, mock_subproccess_popen): """Test DKIM key""" response = authorized_client.get("/services/mailserver/dkim") assert response.status_code == 200 - assert base64.b64decode(response.data) == b"I am a DKIM key" + assert base64.b64decode(response.text) == b"I am a DKIM key" assert mock_subproccess_popen.call_args[0][0] == [ "cat", "/var/dkim/example.com.selector.txt", diff --git a/tests/services/test_nextcloud.py b/tests/test_rest_endpoints/services/test_nextcloud.py similarity index 100% rename from tests/services/test_nextcloud.py rename to tests/test_rest_endpoints/services/test_nextcloud.py diff --git a/tests/services/test_nextcloud/enable_undefined.json 
b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json similarity index 100% rename from tests/services/test_nextcloud/enable_undefined.json rename to tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json diff --git a/tests/services/test_nextcloud/turned_off.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json similarity index 100% rename from tests/services/test_nextcloud/turned_off.json rename to tests/test_rest_endpoints/services/test_nextcloud/turned_off.json diff --git a/tests/services/test_nextcloud/turned_on.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json similarity index 100% rename from tests/services/test_nextcloud/turned_on.json rename to tests/test_rest_endpoints/services/test_nextcloud/turned_on.json diff --git a/tests/services/test_nextcloud/undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json similarity index 100% rename from tests/services/test_nextcloud/undefined.json rename to tests/test_rest_endpoints/services/test_nextcloud/undefined.json diff --git a/tests/services/test_ocserv.py b/tests/test_rest_endpoints/services/test_ocserv.py similarity index 100% rename from tests/services/test_ocserv.py rename to tests/test_rest_endpoints/services/test_ocserv.py diff --git a/tests/services/test_ocserv/enable_undefined.json b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json similarity index 100% rename from tests/services/test_ocserv/enable_undefined.json rename to tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json diff --git a/tests/services/test_ocserv/turned_off.json b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json similarity index 100% rename from tests/services/test_ocserv/turned_off.json rename to tests/test_rest_endpoints/services/test_ocserv/turned_off.json diff --git a/tests/services/test_ocserv/turned_on.json b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json similarity 
index 100% rename from tests/services/test_ocserv/turned_on.json rename to tests/test_rest_endpoints/services/test_ocserv/turned_on.json diff --git a/tests/services/test_ocserv/undefined.json b/tests/test_rest_endpoints/services/test_ocserv/undefined.json similarity index 100% rename from tests/services/test_ocserv/undefined.json rename to tests/test_rest_endpoints/services/test_ocserv/undefined.json diff --git a/tests/services/test_pleroma.py b/tests/test_rest_endpoints/services/test_pleroma.py similarity index 100% rename from tests/services/test_pleroma.py rename to tests/test_rest_endpoints/services/test_pleroma.py diff --git a/tests/services/test_pleroma/enable_undefined.json b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json similarity index 100% rename from tests/services/test_pleroma/enable_undefined.json rename to tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json diff --git a/tests/services/test_pleroma/turned_off.json b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json similarity index 100% rename from tests/services/test_pleroma/turned_off.json rename to tests/test_rest_endpoints/services/test_pleroma/turned_off.json diff --git a/tests/services/test_pleroma/turned_on.json b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json similarity index 100% rename from tests/services/test_pleroma/turned_on.json rename to tests/test_rest_endpoints/services/test_pleroma/turned_on.json diff --git a/tests/services/test_pleroma/undefined.json b/tests/test_rest_endpoints/services/test_pleroma/undefined.json similarity index 100% rename from tests/services/test_pleroma/undefined.json rename to tests/test_rest_endpoints/services/test_pleroma/undefined.json diff --git a/tests/services/test_restic.py b/tests/test_rest_endpoints/services/test_restic.py similarity index 93% rename from tests/services/test_restic.py rename to tests/test_rest_endpoints/services/test_restic.py index 913362f..9502be5 100644 --- 
a/tests/services/test_restic.py +++ b/tests/test_rest_endpoints/services/test_restic.py @@ -43,7 +43,7 @@ class ResticControllerMock: @pytest.fixture def mock_restic_controller(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerMock, ) @@ -60,7 +60,7 @@ class ResticControllerMockNoKey: @pytest.fixture def mock_restic_controller_no_key(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerMockNoKey, ) @@ -77,7 +77,7 @@ class ResticControllerNotInitialized: @pytest.fixture def mock_restic_controller_not_initialized(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerNotInitialized, ) @@ -94,7 +94,7 @@ class ResticControllerInitializing: @pytest.fixture def mock_restic_controller_initializing(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerInitializing, ) @@ -111,7 +111,7 @@ class ResticControllerBackingUp: @pytest.fixture def mock_restic_controller_backing_up(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerBackingUp, ) @@ -128,7 +128,7 @@ class ResticControllerError: @pytest.fixture def mock_restic_controller_error(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerError, ) @@ -145,7 +145,7 @@ class ResticControllerRestoring: @pytest.fixture def 
mock_restic_controller_restoring(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerRestoring, ) @@ -154,9 +154,7 @@ def mock_restic_controller_restoring(mocker): @pytest.fixture def mock_restic_tasks(mocker): - mock = mocker.patch( - "selfprivacy_api.resources.services.restic.restic_tasks", autospec=True - ) + mock = mocker.patch("selfprivacy_api.rest.services.restic_tasks", autospec=True) return mock @@ -197,7 +195,7 @@ def test_get_snapshots_unauthorized(client, mock_restic_controller, mock_restic_ def test_get_snapshots(authorized_client, mock_restic_controller, mock_restic_tasks): response = authorized_client.get("/services/restic/backup/list") assert response.status_code == 200 - assert response.get_json() == MOCKED_SNAPSHOTS + assert response.json() == MOCKED_SNAPSHOTS def test_create_backup_unauthorized(client, mock_restic_controller, mock_restic_tasks): @@ -247,7 +245,7 @@ def test_check_backup_status( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "INITIALIZED", "progress": 0, "error_message": None, @@ -259,7 +257,7 @@ def test_check_backup_status_no_key( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "NO_KEY", "progress": 0, "error_message": None, @@ -271,7 +269,7 @@ def test_check_backup_status_not_initialized( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "NOT_INITIALIZED", "progress": 0, "error_message": None, @@ -283,7 +281,7 @@ def test_check_backup_status_initializing( ): response = 
authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "INITIALIZING", "progress": 0, "error_message": None, @@ -295,7 +293,7 @@ def test_check_backup_status_backing_up( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "BACKING_UP", "progress": 0.42, "error_message": None, @@ -307,7 +305,7 @@ def test_check_backup_status_error( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "ERROR", "progress": 0, "error_message": "Error message", @@ -319,7 +317,7 @@ def test_check_backup_status_restoring( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "RESTORING", "progress": 0, "error_message": None, @@ -346,7 +344,7 @@ def test_backup_restore_without_backup_id( authorized_client, mock_restic_controller, mock_restic_tasks ): response = authorized_client.put("/services/restic/backup/restore", json={}) - assert response.status_code == 400 + assert response.status_code == 422 assert mock_restic_tasks.restore_from_backup.call_count == 0 @@ -440,7 +438,7 @@ def test_set_backblaze_config_without_arguments( authorized_client, mock_restic_controller, mock_restic_tasks, some_settings ): response = authorized_client.put("/services/restic/backblaze/config") - assert response.status_code == 400 + assert response.status_code == 422 assert mock_restic_tasks.update_keys_from_userdata.call_count == 0 @@ -451,7 +449,7 @@ def test_set_backblaze_config_without_all_values( "/services/restic/backblaze/config", json={"accountId": "123", "applicationKey": "456"}, ) - assert response.status_code == 400 + 
assert response.status_code == 422 assert mock_restic_tasks.update_keys_from_userdata.call_count == 0 diff --git a/tests/services/test_restic/no_values.json b/tests/test_rest_endpoints/services/test_restic/no_values.json similarity index 100% rename from tests/services/test_restic/no_values.json rename to tests/test_rest_endpoints/services/test_restic/no_values.json diff --git a/tests/services/test_restic/some_values.json b/tests/test_rest_endpoints/services/test_restic/some_values.json similarity index 100% rename from tests/services/test_restic/some_values.json rename to tests/test_rest_endpoints/services/test_restic/some_values.json diff --git a/tests/services/test_restic/undefined.json b/tests/test_rest_endpoints/services/test_restic/undefined.json similarity index 100% rename from tests/services/test_restic/undefined.json rename to tests/test_rest_endpoints/services/test_restic/undefined.json diff --git a/tests/services/test_services.py b/tests/test_rest_endpoints/services/test_services.py similarity index 70% rename from tests/services/test_services.py rename to tests/test_rest_endpoints/services/test_services.py index aed48fb..1108e8c 100644 --- a/tests/services/test_services.py +++ b/tests/test_rest_endpoints/services/test_services.py @@ -9,76 +9,81 @@ def read_json(file_path): def call_args_asserts(mocked_object): - assert mocked_object.call_count == 8 + assert mocked_object.call_count == 7 assert mocked_object.call_args_list[0][0][0] == [ "systemctl", - "status", + "show", "dovecot2.service", ] assert mocked_object.call_args_list[1][0][0] == [ "systemctl", - "status", + "show", "postfix.service", ] assert mocked_object.call_args_list[2][0][0] == [ "systemctl", - "status", - "nginx.service", + "show", + "vaultwarden.service", ] assert mocked_object.call_args_list[3][0][0] == [ "systemctl", - "status", - "vaultwarden.service", + "show", + "gitea.service", ] assert mocked_object.call_args_list[4][0][0] == [ "systemctl", - "status", - "gitea.service", + 
"show", + "phpfpm-nextcloud.service", ] assert mocked_object.call_args_list[5][0][0] == [ "systemctl", - "status", - "phpfpm-nextcloud.service", + "show", + "ocserv.service", ] assert mocked_object.call_args_list[6][0][0] == [ "systemctl", - "status", - "ocserv.service", - ] - assert mocked_object.call_args_list[7][0][0] == [ - "systemctl", - "status", + "show", "pleroma.service", ] -class ProcessMock: - """Mock subprocess.Popen""" +SUCCESSFUL_STATUS = b""" +Type=oneshot +ExitType=main +Restart=no +NotifyAccess=none +RestartUSec=100ms +LoadState=loaded +ActiveState=active +FreezerState=running +SubState=exited +""" - def __init__(self, args, **kwargs): - self.args = args - self.kwargs = kwargs - - def communicate(): - return (b"", None) - - returncode = 0 - - -class BrokenServiceMock(ProcessMock): - returncode = 3 +FAILED_STATUS = b""" +Type=oneshot +ExitType=main +Restart=no +NotifyAccess=none +RestartUSec=100ms +LoadState=loaded +ActiveState=failed +FreezerState=running +SubState=exited +""" @pytest.fixture def mock_subproccess_popen(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=SUCCESSFUL_STATUS + ) return mock @pytest.fixture def mock_broken_service(mocker): mock = mocker.patch( - "subprocess.Popen", autospec=True, return_value=BrokenServiceMock + "subprocess.check_output", autospec=True, return_value=FAILED_STATUS ) return mock @@ -104,7 +109,7 @@ def test_illegal_methods(authorized_client, mock_subproccess_popen): def test_dkim_key(authorized_client, mock_subproccess_popen): response = authorized_client.get("/services/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "imap": 0, "smtp": 0, "http": 0, @@ -120,14 +125,14 @@ def test_dkim_key(authorized_client, mock_subproccess_popen): def test_no_dkim_key(authorized_client, mock_broken_service): response = 
authorized_client.get("/services/status") assert response.status_code == 200 - assert response.get_json() == { - "imap": 3, - "smtp": 3, - "http": 3, - "bitwarden": 3, - "gitea": 3, - "nextcloud": 3, - "ocserv": 3, - "pleroma": 3, + assert response.json() == { + "imap": 1, + "smtp": 1, + "http": 0, + "bitwarden": 1, + "gitea": 1, + "nextcloud": 1, + "ocserv": 1, + "pleroma": 1, } call_args_asserts(mock_broken_service) diff --git a/tests/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py similarity index 91% rename from tests/services/test_ssh.py rename to tests/test_rest_endpoints/services/test_ssh.py index 5975811..a17bdab 100644 --- a/tests/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,14 +95,18 @@ def some_users(mocker, datadir): ## TEST 401 ###################################################### -@pytest.mark.parametrize( - "endpoint", ["ssh", "ssh/enable", "ssh/key/send", "ssh/keys/user"] -) +@pytest.mark.parametrize("endpoint", ["ssh/enable", "ssh/keys/user"]) def test_unauthorized(client, ssh_off, endpoint): response = client.post(f"/services/{endpoint}") assert response.status_code == 401 +@pytest.mark.parametrize("endpoint", ["ssh", "ssh/key/send"]) +def test_unauthorized_put(client, ssh_off, endpoint): + response = client.put(f"/services/{endpoint}") + assert response.status_code == 401 + + ## TEST ENABLE ###################################################### @@ -133,31 +137,31 @@ def test_legacy_enable_when_enabled(authorized_client, ssh_on): def test_get_current_settings_ssh_off(authorized_client, ssh_off): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": False, "passwordAuthentication": True} + assert response.json() == {"enable": False, "passwordAuthentication": True} def test_get_current_settings_ssh_on(authorized_client, ssh_on): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert 
response.json == {"enable": True, "passwordAuthentication": True} + assert response.json() == {"enable": True, "passwordAuthentication": True} def test_get_current_settings_all_off(authorized_client, all_off): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": False, "passwordAuthentication": False} + assert response.json() == {"enable": False, "passwordAuthentication": False} def test_get_current_settings_undefined(authorized_client, undefined_settings): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": True, "passwordAuthentication": True} + assert response.json() == {"enable": True, "passwordAuthentication": True} def test_get_current_settings_mostly_undefined(authorized_client, undefined_values): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": True, "passwordAuthentication": True} + assert response.json() == {"enable": True, "passwordAuthentication": True} ## PUT ON /ssh ###################################################### @@ -275,29 +279,22 @@ def test_add_invalid_root_key(authorized_client, ssh_on): ## /ssh/keys/{user} ###################################################### -def test_add_root_key_via_wrong_endpoint(authorized_client, ssh_on): - response = authorized_client.post( - "/services/ssh/keys/root", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 400 - - def test_get_root_key(authorized_client, root_and_admin_have_keys): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 - assert response.json == ["ssh-ed25519 KEY test@pc"] + assert response.json() == ["ssh-ed25519 KEY test@pc"] def test_get_root_key_when_none(authorized_client, ssh_on): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 - assert response.json == [] + 
assert response.json() == [] def test_get_root_key_on_undefined(authorized_client, undefined_settings): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_delete_root_key(authorized_client, root_and_admin_have_keys): @@ -310,6 +307,10 @@ def test_delete_root_key(authorized_client, root_and_admin_have_keys): not in read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[ "ssh" ] + or read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][ + "rootKeys" + ] + == [] ) @@ -330,19 +331,19 @@ def test_delete_root_key_on_undefined(authorized_client, undefined_settings): "/services/ssh/keys/root", json={"public_key": "ssh-ed25519 KEY test@pc"} ) assert response.status_code == 404 - assert read_json(undefined_settings / "undefined.json")["ssh"]["rootKeys"] == [] + assert "ssh" not in read_json(undefined_settings / "undefined.json") def test_get_admin_key(authorized_client, root_and_admin_have_keys): response = authorized_client.get("/services/ssh/keys/tester") assert response.status_code == 200 - assert response.json == ["ssh-rsa KEY test@pc"] + assert response.json() == ["ssh-rsa KEY test@pc"] def test_get_admin_key_when_none(authorized_client, ssh_on): response = authorized_client.get("/services/ssh/keys/tester") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_delete_admin_key(authorized_client, root_and_admin_have_keys): @@ -371,7 +372,7 @@ def test_delete_admin_key_on_undefined(authorized_client, undefined_settings): "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} ) assert response.status_code == 404 - assert read_json(undefined_settings / "undefined.json")["sshKeys"] == [] + assert "sshKeys" not in read_json(undefined_settings / "undefined.json") def test_add_admin_key(authorized_client, ssh_on): @@ -418,9 +419,9 @@ def 
test_get_user_key(authorized_client, some_users, user): response = authorized_client.get(f"/services/ssh/keys/user{user}") assert response.status_code == 200 if user == 1: - assert response.json == ["ssh-rsa KEY user1@pc"] + assert response.json() == ["ssh-rsa KEY user1@pc"] else: - assert response.json == [] + assert response.json() == [] def test_get_keys_of_nonexistent_user(authorized_client, some_users): @@ -483,7 +484,13 @@ def test_delete_nonexistent_user_key(authorized_client, some_users, user): f"/services/ssh/keys/user{user}", json={"public_key": "ssh-rsa KEY user1@pc"} ) assert response.status_code == 404 - assert read_json(some_users / "some_users.json")["users"][user - 1]["sshKeys"] == [] + if user == 2: + assert ( + read_json(some_users / "some_users.json")["users"][user - 1]["sshKeys"] + == [] + ) + if user == 3: + "sshKeys" not in read_json(some_users / "some_users.json")["users"][user - 1] def test_add_keys_of_nonexistent_user(authorized_client, some_users): diff --git a/tests/services/test_ssh/all_off.json b/tests/test_rest_endpoints/services/test_ssh/all_off.json similarity index 100% rename from tests/services/test_ssh/all_off.json rename to tests/test_rest_endpoints/services/test_ssh/all_off.json diff --git a/tests/services/test_ssh/root_and_admin_have_keys.json b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json similarity index 100% rename from tests/services/test_ssh/root_and_admin_have_keys.json rename to tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json diff --git a/tests/services/test_ssh/some_users.json b/tests/test_rest_endpoints/services/test_ssh/some_users.json similarity index 100% rename from tests/services/test_ssh/some_users.json rename to tests/test_rest_endpoints/services/test_ssh/some_users.json diff --git a/tests/services/test_ssh/turned_off.json b/tests/test_rest_endpoints/services/test_ssh/turned_off.json similarity index 100% rename from tests/services/test_ssh/turned_off.json 
rename to tests/test_rest_endpoints/services/test_ssh/turned_off.json diff --git a/tests/services/test_ssh/turned_on.json b/tests/test_rest_endpoints/services/test_ssh/turned_on.json similarity index 100% rename from tests/services/test_ssh/turned_on.json rename to tests/test_rest_endpoints/services/test_ssh/turned_on.json diff --git a/tests/services/test_ssh/undefined.json b/tests/test_rest_endpoints/services/test_ssh/undefined.json similarity index 100% rename from tests/services/test_ssh/undefined.json rename to tests/test_rest_endpoints/services/test_ssh/undefined.json diff --git a/tests/services/test_ssh/undefined_values.json b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json similarity index 100% rename from tests/services/test_ssh/undefined_values.json rename to tests/test_rest_endpoints/services/test_ssh/undefined_values.json diff --git a/tests/test_auth.py b/tests/test_rest_endpoints/test_auth.py similarity index 87% rename from tests/test_auth.py rename to tests/test_rest_endpoints/test_auth.py index d209c9c..1083be5 100644 --- a/tests/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -2,12 +2,10 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring import datetime -import json -import re import pytest from mnemonic import Mnemonic -from .common import read_json, write_json +from tests.common import read_json, write_json TOKENS_FILE_CONTETS = { @@ -36,11 +34,11 @@ DATE_FORMATS = [ def test_get_tokens_info(authorized_client, tokens_file): response = authorized_client.get("/auth/tokens") assert response.status_code == 200 - assert response.json == [ - {"name": "test_token", "date": "2022-01-14 08:31:10.789314", "is_caller": True}, + assert response.json() == [ + {"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True}, { "name": "test_token2", - "date": "2022-01-14 08:31:10.789314", + "date": "2022-01-14T08:31:10.789314", "is_caller": False, }, ] @@ -98,7 +96,7 @@ def 
test_refresh_token_unauthorized(client, tokens_file): def test_refresh_token(authorized_client, tokens_file): response = authorized_client.post("/auth/tokens") assert response.status_code == 200 - new_token = response.json["token"] + new_token = response.json()["token"] assert read_json(tokens_file)["tokens"][0]["token"] == new_token @@ -106,7 +104,7 @@ def test_refresh_token(authorized_client, tokens_file): def test_get_new_device_auth_token_unauthorized(client, tokens_file): - response = client.get("/auth/new_device") + response = client.post("/auth/new_device") assert response.status_code == 401 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -114,19 +112,19 @@ def test_get_new_device_auth_token_unauthorized(client, tokens_file): def test_get_new_device_auth_token(authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token def test_get_and_delete_new_device_token(authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token response = authorized_client.delete( - "/auth/new_device", json={"token": response.json["token"]} + "/auth/new_device", json={"token": response.json()["token"]} ) assert response.status_code == 200 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -141,15 +139,15 @@ def test_delete_token_unauthenticated(client, tokens_file): def 
test_get_and_authorize_new_device(client, authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json["token"] + assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" @@ -165,19 +163,19 @@ def test_authorize_new_device_with_invalid_token(client, tokens_file): def test_get_and_authorize_used_token(client, authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json["token"] + assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" response = client.post( "/auth/new_device/authorize", - json={"token": 
response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 404 @@ -187,8 +185,8 @@ def test_get_and_authorize_token_after_12_minutes( ): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token file_data = read_json(tokens_file) @@ -199,7 +197,7 @@ def test_get_and_authorize_token_after_12_minutes( response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 404 @@ -209,7 +207,7 @@ def test_authorize_without_token(client, tokens_file): "/auth/new_device/authorize", json={"device": "new_device"}, ) - assert response.status_code == 400 + assert response.status_code == 422 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -245,7 +243,7 @@ def test_get_recovery_token_status_unauthorized(client, tokens_file): def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": False, "valid": False, "date": None, @@ -259,8 +257,8 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): # Generate token without expiration and uses_left response = authorized_client.post("/auth/recovery_token") assert response.status_code == 200 - assert "token" in response.json - mnemonic_token = response.json["token"] + assert "token" in response.json() + mnemonic_token = response.json()["token"] token = 
Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert read_json(tokens_file)["recovery_token"]["token"] == token @@ -274,9 +272,9 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): ) # Try to get token status - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -290,7 +288,7 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): json={"token": mnemonic_token, "device": "recovery_device"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][2]["token"] == new_token assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" @@ -300,7 +298,7 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): json={"token": mnemonic_token, "device": "recovery_device2"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][3]["token"] == new_token assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" @@ -318,8 +316,8 @@ def test_generate_recovery_token_with_expiration_date( json={"expiration": expiration_date_str}, ) assert response.status_code == 200 - assert "token" in response.json - mnemonic_token = response.json["token"] + assert "token" in response.json() + mnemonic_token = response.json()["token"] token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert read_json(tokens_file)["recovery_token"]["token"] == token assert datetime.datetime.strptime( @@ -336,9 +334,9 @@ def test_generate_recovery_token_with_expiration_date( ) # Try to get token status - response = 
client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -352,7 +350,7 @@ def test_generate_recovery_token_with_expiration_date( json={"token": mnemonic_token, "device": "recovery_device"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][2]["token"] == new_token assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" @@ -362,7 +360,7 @@ def test_generate_recovery_token_with_expiration_date( json={"token": mnemonic_token, "device": "recovery_device2"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][3]["token"] == new_token assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" @@ -381,9 +379,9 @@ def test_generate_recovery_token_with_expiration_date( assert read_json(tokens_file)["tokens"] == new_data["tokens"] # Get the status of the token - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": False, "date": time_generated, @@ -397,7 +395,7 @@ def test_generate_recovery_token_with_expiration_in_the_past( authorized_client, tokens_file, timeformat ): # Server must return 400 if expiration date is in the past - expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5) + expiration_date = datetime.datetime.utcnow() - datetime.timedelta(minutes=5) expiration_date_str = expiration_date.strftime(timeformat) response = authorized_client.post( "/auth/recovery_token", @@ -416,7 +414,7 @@ def 
test_generate_recovery_token_with_invalid_time_format( "/auth/recovery_token", json={"expiration": expiration_date}, ) - assert response.status_code == 400 + assert response.status_code == 422 assert "recovery_token" not in read_json(tokens_file) @@ -429,8 +427,8 @@ def test_generate_recovery_token_with_limited_uses( json={"uses": 2}, ) assert response.status_code == 200 - assert "token" in response.json - mnemonic_token = response.json["token"] + assert "token" in response.json() + mnemonic_token = response.json()["token"] token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert read_json(tokens_file)["recovery_token"]["token"] == token assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 @@ -445,9 +443,9 @@ def test_generate_recovery_token_with_limited_uses( ) # Try to get token status - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -461,16 +459,16 @@ def test_generate_recovery_token_with_limited_uses( json={"token": mnemonic_token, "device": "recovery_device"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][2]["token"] == new_token assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" assert read_json(tokens_file)["recovery_token"]["uses_left"] == 1 # Get the status of the token - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -484,14 +482,14 @@ def test_generate_recovery_token_with_limited_uses( json={"token": mnemonic_token, "device": "recovery_device2"}, ) assert 
recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][3]["token"] == new_token assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" # Get the status of the token - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": False, "date": time_generated, diff --git a/tests/test_system.py b/tests/test_rest_endpoints/test_system.py similarity index 97% rename from tests/test_system.py rename to tests/test_rest_endpoints/test_system.py index b9c8649..90c1499 100644 --- a/tests/test_system.py +++ b/tests/test_rest_endpoints/test_system.py @@ -123,13 +123,13 @@ def test_get_timezone_unauthorized(client, turned_on): def test_get_timezone(authorized_client, turned_on): response = authorized_client.get("/system/configuration/timezone") assert response.status_code == 200 - assert response.get_json() == "Europe/Moscow" + assert response.json() == "Europe/Moscow" def test_get_timezone_on_undefined(authorized_client, undefined_config): response = authorized_client.get("/system/configuration/timezone") assert response.status_code == 200 - assert response.get_json() == "Europe/Uzhgorod" + assert response.json() == "Europe/Uzhgorod" def test_put_timezone_unauthorized(client, turned_on): @@ -159,7 +159,7 @@ def test_put_timezone_on_undefined(authorized_client, undefined_config): def test_put_timezone_without_timezone(authorized_client, turned_on): response = authorized_client.put("/system/configuration/timezone", json={}) - assert response.status_code == 400 + assert response.status_code == 422 assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" @@ -182,7 +182,7 @@ def test_get_auto_upgrade_unauthorized(client, turned_on): def 
test_get_auto_upgrade(authorized_client, turned_on): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": True, "allowReboot": True, } @@ -191,7 +191,7 @@ def test_get_auto_upgrade(authorized_client, turned_on): def test_get_auto_upgrade_on_undefined(authorized_client, undefined_config): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": True, "allowReboot": False, } @@ -200,7 +200,7 @@ def test_get_auto_upgrade_on_undefined(authorized_client, undefined_config): def test_get_auto_upgrade_without_values(authorized_client, no_values): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": True, "allowReboot": False, } @@ -209,7 +209,7 @@ def test_get_auto_upgrade_without_values(authorized_client, no_values): def test_get_auto_upgrade_turned_off(authorized_client, turned_off): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": False, "allowReboot": False, } @@ -357,7 +357,7 @@ def test_get_system_version_unauthorized(client, mock_subprocess_check_output): def test_get_system_version(authorized_client, mock_subprocess_check_output): response = authorized_client.get("/system/version") assert response.status_code == 200 - assert response.get_json() == {"system_version": "Testing Linux"} + assert response.json() == {"system_version": "Testing Linux"} assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"] @@ -384,7 +384,7 @@ def test_get_python_version_unauthorized(client, mock_subprocess_check_output): def 
test_get_python_version(authorized_client, mock_subprocess_check_output): response = authorized_client.get("/system/pythonVersion") assert response.status_code == 200 - assert response.get_json() == "Testing Linux" + assert response.json() == "Testing Linux" assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["python", "-V"] diff --git a/tests/test_system/domain b/tests/test_rest_endpoints/test_system/domain similarity index 100% rename from tests/test_system/domain rename to tests/test_rest_endpoints/test_system/domain diff --git a/tests/test_system/no_values.json b/tests/test_rest_endpoints/test_system/no_values.json similarity index 100% rename from tests/test_system/no_values.json rename to tests/test_rest_endpoints/test_system/no_values.json diff --git a/tests/test_system/turned_off.json b/tests/test_rest_endpoints/test_system/turned_off.json similarity index 100% rename from tests/test_system/turned_off.json rename to tests/test_rest_endpoints/test_system/turned_off.json diff --git a/tests/test_system/turned_on.json b/tests/test_rest_endpoints/test_system/turned_on.json similarity index 100% rename from tests/test_system/turned_on.json rename to tests/test_rest_endpoints/test_system/turned_on.json diff --git a/tests/test_system/undefined.json b/tests/test_rest_endpoints/test_system/undefined.json similarity index 100% rename from tests/test_system/undefined.json rename to tests/test_rest_endpoints/test_system/undefined.json diff --git a/tests/test_users.py b/tests/test_rest_endpoints/test_users.py similarity index 93% rename from tests/test_users.py rename to tests/test_rest_endpoints/test_users.py index 9374ef2..ebb3eff 100644 --- a/tests/test_users.py +++ b/tests/test_rest_endpoints/test_users.py @@ -121,31 +121,31 @@ def test_get_users_unauthorized(client, some_users, mock_subprocess_popen): def test_get_some_users(authorized_client, some_users, mock_subprocess_popen): response = 
authorized_client.get("/users") assert response.status_code == 200 - assert response.json == ["user1", "user2", "user3"] + assert response.json() == ["user1", "user2", "user3"] def test_get_one_user(authorized_client, one_user, mock_subprocess_popen): response = authorized_client.get("/users") assert response.status_code == 200 - assert response.json == ["user1"] + assert response.json() == ["user1"] def test_get_one_user_with_main(authorized_client, one_user, mock_subprocess_popen): response = authorized_client.get("/users?withMainUser=true") assert response.status_code == 200 - assert response.json == ["tester", "user1"] + assert response.json().sort() == ["tester", "user1"].sort() def test_get_no_users(authorized_client, no_users, mock_subprocess_popen): response = authorized_client.get("/users") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_get_no_users_with_main(authorized_client, no_users, mock_subprocess_popen): response = authorized_client.get("/users?withMainUser=true") assert response.status_code == 200 - assert response.json == ["tester"] + assert response.json() == ["tester"] def test_get_undefined_users( @@ -153,7 +153,7 @@ def test_get_undefined_users( ): response = authorized_client.get("/users") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_post_users_unauthorized(client, some_users, mock_subprocess_popen): @@ -174,6 +174,7 @@ def test_post_one_user(authorized_client, one_user, mock_subprocess_popen): }, { "username": "user4", + "sshKeys": [], "hashedPassword": "NEW_HASHED", }, ] @@ -181,19 +182,19 @@ def test_post_one_user(authorized_client, one_user, mock_subprocess_popen): def test_post_without_username(authorized_client, one_user, mock_subprocess_popen): response = authorized_client.post("/users", json={"password": "password"}) - assert response.status_code == 400 + assert response.status_code == 422 def 
test_post_without_password(authorized_client, one_user, mock_subprocess_popen): response = authorized_client.post("/users", json={"username": "user4"}) - assert response.status_code == 400 + assert response.status_code == 422 def test_post_without_username_and_password( authorized_client, one_user, mock_subprocess_popen ): response = authorized_client.post("/users", json={}) - assert response.status_code == 400 + assert response.status_code == 422 @pytest.mark.parametrize("username", invalid_usernames) @@ -226,7 +227,7 @@ def test_post_user_to_undefined_users( ) assert response.status_code == 201 assert read_json(undefined_settings / "undefined.json")["users"] == [ - {"username": "user4", "hashedPassword": "NEW_HASHED"} + {"username": "user4", "sshKeys": [], "hashedPassword": "NEW_HASHED"} ] @@ -279,11 +280,6 @@ def test_delete_main_user(authorized_client, some_users, mock_subprocess_popen): assert response.status_code == 400 -def test_delete_without_argument(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.delete("/users/") - assert response.status_code == 404 - - def test_delete_just_delete(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.delete("/users") assert response.status_code == 405 diff --git a/tests/test_users/no_users.json b/tests/test_rest_endpoints/test_users/no_users.json similarity index 100% rename from tests/test_users/no_users.json rename to tests/test_rest_endpoints/test_users/no_users.json diff --git a/tests/test_users/one_user.json b/tests/test_rest_endpoints/test_users/one_user.json similarity index 100% rename from tests/test_users/one_user.json rename to tests/test_rest_endpoints/test_users/one_user.json diff --git a/tests/test_users/some_users.json b/tests/test_rest_endpoints/test_users/some_users.json similarity index 100% rename from tests/test_users/some_users.json rename to tests/test_rest_endpoints/test_users/some_users.json diff --git 
a/tests/test_users/undefined.json b/tests/test_rest_endpoints/test_users/undefined.json similarity index 100% rename from tests/test_users/undefined.json rename to tests/test_rest_endpoints/test_users/undefined.json