diff --git a/.gitignore b/.gitignore index 1264e45..7941396 100755 --- a/.gitignore +++ b/.gitignore @@ -145,3 +145,5 @@ dmypy.json cython_debug/ # End of https://www.toptal.com/developers/gitignore/api/flask + +*.db diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..9135ea9 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,3 @@ +[MASTER] +init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))" +extension-pkg-whitelist=pydantic diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..a691ce0 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,19 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "Python: FastAPI", + "type": "python", + "request": "launch", + "module": "uvicorn", + "args": [ + "selfprivacy_api.app:app" + ], + "jinja": true, + "justMyCode": false + } + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json index ccb092d..8f927dc 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -6,5 +6,7 @@ "tests" ], "python.testing.unittestEnabled": false, - "python.testing.pytestEnabled": true + "python.testing.pytestEnabled": true, + "python.languageServer": "Pylance", + "python.analysis.typeCheckingMode": "basic" } \ No newline at end of file diff --git a/api.nix b/api.nix new file mode 100644 index 0000000..83bc695 --- /dev/null +++ b/api.nix @@ -0,0 +1,64 @@ +{ lib, python39Packages }: +with python39Packages; +buildPythonApplication { + pname = "selfprivacy-api"; + version = "2.0.0"; + + propagatedBuildInputs = [ + setuptools + portalocker + pytz + pytest + pytest-mock + pytest-datadir + huey + gevent + mnemonic + pydantic + typing-extensions + psutil + fastapi + uvicorn + (buildPythonPackage rec { + pname = "strawberry-graphql"; + version = "0.123.0"; + format = "pyproject"; + patches = [ + ./strawberry-graphql.patch + ]; + propagatedBuildInputs = [ + typing-extensions + python-multipart + python-dateutil + # flask + pydantic + pygments + poetry + # flask-cors + (buildPythonPackage rec { + pname = "graphql-core"; + version = "3.2.0"; + format = "setuptools"; + src = fetchPypi { + inherit pname version; + sha256 = "sha256-huKgvgCL/eGe94OI3opyWh2UKpGQykMcJKYIN5c4A84="; + }; + checkInputs = [ + pytest-asyncio + pytest-benchmark + pytestCheckHook + ]; + pythonImportsCheck = [ + "graphql" + ]; + }) + ]; + src = fetchPypi { + inherit pname version; + sha256 = "KsmZ5Xv8tUg6yBxieAEtvoKoRG60VS+iVGV0X6oCExo="; + }; + }) + ]; + + src = ./.; +} diff --git a/default.nix b/default.nix new file mode 100644 index 0000000..740c7ce --- /dev/null +++ b/default.nix @@ -0,0 +1,2 @@ +{ pkgs ? 
import <nixpkgs> {} }: +pkgs.callPackage ./api.nix {} diff --git a/pyproject.toml b/pyproject.toml index 1ffd18c..7f8d872 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,3 @@ [build-system] -requires = ["setuptools", "wheel", "portalocker", "flask-swagger", "flask-swagger-ui"] -build-backend = "setuptools.build_meta" \ No newline at end of file +requires = ["setuptools", "wheel", "portalocker"] +build-backend = "setuptools.build_meta" diff --git a/requirements.txt b/requirements.txt deleted file mode 100755 index 4e0e02e..0000000 --- a/requirements.txt +++ /dev/null @@ -1,17 +0,0 @@ -wheel -flask -flask_restful -flask_socketio -setuptools -portalocker -flask-swagger -flask-swagger-ui -pytz -huey -gevent -mnemonic - -pytest -coverage -pytest-mock -pytest-datadir diff --git a/selfprivacy_api/resources/__init__.py b/selfprivacy_api/actions/__init__.py similarity index 100% rename from selfprivacy_api/resources/__init__.py rename to selfprivacy_api/actions/__init__.py diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py new file mode 100644 index 0000000..61c695d --- /dev/null +++ b/selfprivacy_api/actions/api_tokens.py @@ -0,0 +1,116 @@ +"""App tokens actions""" +from datetime import datetime +from typing import Optional +from pydantic import BaseModel + + +from selfprivacy_api.utils.auth import ( + delete_token, + generate_recovery_token, + get_recovery_token_status, + get_tokens_info, + is_recovery_token_exists, + is_recovery_token_valid, + is_token_name_exists, + is_token_name_pair_valid, + refresh_token, + get_token_name, +) + + +class TokenInfoWithIsCaller(BaseModel): + """Token info""" + + name: str + date: datetime + is_caller: bool + + +def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCaller]: + """Get the tokens info""" + caller_name = get_token_name(caller_token) + tokens = get_tokens_info() + return [ + TokenInfoWithIsCaller( + name=token.name, + date=token.date, + is_caller=token.name == caller_name, + ) + for token in tokens + ] + + +class NotFoundException(Exception): + """Not found exception""" + + +class CannotDeleteCallerException(Exception): + """Cannot delete caller exception""" + + +def delete_api_token(caller_token: str, token_name: str) -> None: + """Delete the token""" + if is_token_name_pair_valid(token_name, caller_token): + raise CannotDeleteCallerException("Cannot delete caller's token") + if not is_token_name_exists(token_name): + raise NotFoundException("Token not found") + delete_token(token_name) + + +def refresh_api_token(caller_token: str) -> str: + """Refresh the token""" + new_token = refresh_token(caller_token) + if new_token is None: + raise NotFoundException("Token not found") + return new_token + + +class RecoveryTokenStatus(BaseModel): + """Recovery token status""" + + exists: bool + valid: bool + date: Optional[datetime] = None + expiration: Optional[datetime] = None + uses_left: Optional[int] = None + + +def get_api_recovery_token_status() -> RecoveryTokenStatus: + """Get the recovery token status""" + if not is_recovery_token_exists(): + return RecoveryTokenStatus(exists=False, valid=False) + status = get_recovery_token_status() + if status is None: + return RecoveryTokenStatus(exists=False, valid=False) + is_valid = is_recovery_token_valid() + return RecoveryTokenStatus( + exists=True, + valid=is_valid, + date=status["date"], + expiration=status["expiration"], + uses_left=status["uses_left"], + ) + + +class InvalidExpirationDate(Exception): + """Invalid expiration date exception"""
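# A usage sketch for illustration (editorial addition, not part of the original
# patch): the hypothetical helper below shows how the recovery-key actions in
# this module are meant to compose; it assumes only names defined in this file.
def example_recovery_key_flow() -> str:
    from datetime import timedelta

    status = get_api_recovery_token_status()
    if status.exists and status.valid:
        return "a valid recovery key already exists"
    # expiration_date must be in the future and uses_left must be positive,
    # otherwise InvalidExpirationDate / InvalidUsesLeft are raised.
    return get_new_api_recovery_key(
        expiration_date=datetime.now() + timedelta(days=1),
        uses_left=3,
    )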
+ + +class InvalidUsesLeft(Exception): + """Invalid uses left exception""" + + +def get_new_api_recovery_key( + expiration_date: Optional[datetime] = None, uses_left: Optional[int] = None +) -> str: + """Get new recovery key""" + if expiration_date is not None: + current_time = datetime.now().timestamp() + if expiration_date.timestamp() < current_time: + raise InvalidExpirationDate("Expiration date is in the past") + if uses_left is not None: + if uses_left <= 0: + raise InvalidUsesLeft("Uses must be greater than 0") + + key = generate_recovery_token(expiration_date, uses_left) + return key diff --git a/selfprivacy_api/actions/ssh.py b/selfprivacy_api/actions/ssh.py new file mode 100644 index 0000000..3f79ff8 --- /dev/null +++ b/selfprivacy_api/actions/ssh.py @@ -0,0 +1,149 @@ +"""Actions to manage the SSH.""" +from typing import Optional +from pydantic import BaseModel +from selfprivacy_api.actions.users import ( + UserNotFound, + ensure_ssh_and_users_fields_exist, +) + +from selfprivacy_api.utils import WriteUserData, ReadUserData, validate_ssh_public_key + + +def enable_ssh(): + with WriteUserData() as data: + if "ssh" not in data: + data["ssh"] = {} + data["ssh"]["enable"] = True + + +class UserdataSshSettings(BaseModel): + """Settings for the SSH.""" + + enable: bool = True + passwordAuthentication: bool = True + rootKeys: list[str] = [] + + +def get_ssh_settings() -> UserdataSshSettings: + with ReadUserData() as data: + if "ssh" not in data: + return UserdataSshSettings() + if "enable" not in data["ssh"]: + data["ssh"]["enable"] = True + if "passwordAuthentication" not in data["ssh"]: + data["ssh"]["passwordAuthentication"] = True + if "rootKeys" not in data["ssh"]: + data["ssh"]["rootKeys"] = [] + return UserdataSshSettings(**data["ssh"]) + + +def set_ssh_settings( + enable: Optional[bool] = None, password_authentication: Optional[bool] = None +) -> None: + with WriteUserData() as data: + if "ssh" not in data: + data["ssh"] = {} + if enable is not None: + data["ssh"]["enable"] = enable + if password_authentication is not None: + data["ssh"]["passwordAuthentication"] = password_authentication + + +def add_root_ssh_key(public_key: str): + with WriteUserData() as data: + if "ssh" not in data: + data["ssh"] = {} + if "rootKeys" not in data["ssh"]: + data["ssh"]["rootKeys"] = [] + # Return 409 if key already in array + for key in data["ssh"]["rootKeys"]: + if key == public_key: + raise KeyAlreadyExists() + data["ssh"]["rootKeys"].append(public_key) + + +class KeyAlreadyExists(Exception): + """Key already exists""" + + pass + + +class InvalidPublicKey(Exception): + """Invalid public key""" + + pass + + +def create_ssh_key(username: str, ssh_key: str): + """Create a new ssh key""" + + if not validate_ssh_public_key(ssh_key): + raise InvalidPublicKey() + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == data["username"]: + if ssh_key in data["sshKeys"]: + raise KeyAlreadyExists() + + data["sshKeys"].append(ssh_key) + return + + if username == "root": + if ssh_key in data["ssh"]["rootKeys"]: + raise KeyAlreadyExists() + + data["ssh"]["rootKeys"].append(ssh_key) + return + + for user in data["users"]: + if user["username"] == username: + if "sshKeys" not in user: + user["sshKeys"] = [] + if ssh_key in user["sshKeys"]: + raise KeyAlreadyExists() + + user["sshKeys"].append(ssh_key) + return + + raise UserNotFound() + + +class KeyNotFound(Exception): + """Key not found""" + + pass + + +def remove_ssh_key(username: str, ssh_key: str): + """Delete a ssh 
key""" + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == "root": + if ssh_key in data["ssh"]["rootKeys"]: + data["ssh"]["rootKeys"].remove(ssh_key) + return + + raise KeyNotFound() + + if username == data["username"]: + if ssh_key in data["sshKeys"]: + data["sshKeys"].remove(ssh_key) + return + + raise KeyNotFound() + + for user in data["users"]: + if user["username"] == username: + if "sshKeys" not in user: + user["sshKeys"] = [] + if ssh_key in user["sshKeys"]: + user["sshKeys"].remove(ssh_key) + return + + raise KeyNotFound() + + raise UserNotFound() diff --git a/selfprivacy_api/actions/system.py b/selfprivacy_api/actions/system.py new file mode 100644 index 0000000..853662f --- /dev/null +++ b/selfprivacy_api/actions/system.py @@ -0,0 +1,139 @@ +"""Actions to manage the system.""" +import os +import subprocess +import pytz +from typing import Optional +from pydantic import BaseModel + +from selfprivacy_api.utils import WriteUserData, ReadUserData + + +def get_timezone() -> str: + """Get the timezone of the server""" + with ReadUserData() as user_data: + if "timezone" in user_data: + return user_data["timezone"] + return "Europe/Uzhgorod" + + +class InvalidTimezone(Exception): + """Invalid timezone""" + + pass + + +def change_timezone(timezone: str) -> None: + """Change the timezone of the server""" + if timezone not in pytz.all_timezones: + raise InvalidTimezone(f"Invalid timezone: {timezone}") + with WriteUserData() as user_data: + user_data["timezone"] = timezone + + +class UserDataAutoUpgradeSettings(BaseModel): + """Settings for auto-upgrading user data""" + + enable: bool = True + allowReboot: bool = False + + +def get_auto_upgrade_settings() -> UserDataAutoUpgradeSettings: + """Get the auto-upgrade settings""" + with ReadUserData() as user_data: + if "autoUpgrade" in user_data: + return UserDataAutoUpgradeSettings(**user_data["autoUpgrade"]) + return UserDataAutoUpgradeSettings() + + +def set_auto_upgrade_settings( + enalbe: Optional[bool] = None, allowReboot: Optional[bool] = None +) -> None: + """Set the auto-upgrade settings""" + with WriteUserData() as user_data: + if "autoUpgrade" not in user_data: + user_data["autoUpgrade"] = {} + if enalbe is not None: + user_data["autoUpgrade"]["enable"] = enalbe + if allowReboot is not None: + user_data["autoUpgrade"]["allowReboot"] = allowReboot + + +def rebuild_system() -> int: + """Rebuild the system""" + rebuild_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True + ) + rebuild_result.communicate()[0] + return rebuild_result.returncode + + +def rollback_system() -> int: + """Rollback the system""" + rollback_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True + ) + rollback_result.communicate()[0] + return rollback_result.returncode + + +def upgrade_system() -> int: + """Upgrade the system""" + upgrade_result = subprocess.Popen( + ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True + ) + upgrade_result.communicate()[0] + return upgrade_result.returncode + + +def reboot_system() -> None: + """Reboot the system""" + subprocess.Popen(["reboot"], start_new_session=True) + + +def get_system_version() -> str: + """Get system version""" + return subprocess.check_output(["uname", "-a"]).decode("utf-8").strip() + + +def get_python_version() -> str: + """Get Python version""" + return subprocess.check_output(["python", "-V"]).decode("utf-8").strip() + + +class 
+class SystemActionResult(BaseModel): + """System action result""" + + status: int + message: str + data: str + + +def pull_repository_changes() -> SystemActionResult: + """Pull repository changes""" + git_pull_command = ["git", "pull"] + + current_working_directory = os.getcwd() + os.chdir("/etc/nixos") + + git_pull_process_descriptor = subprocess.Popen( + git_pull_command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + shell=False, + ) + + data = git_pull_process_descriptor.communicate()[0].decode("utf-8") + + os.chdir(current_working_directory) + + if git_pull_process_descriptor.returncode == 0: + return SystemActionResult( + status=0, + message="Pulled repository changes", + data=data, + ) + return SystemActionResult( + status=git_pull_process_descriptor.returncode, + message="Failed to pull repository changes", + data=data, + ) diff --git a/selfprivacy_api/actions/users.py b/selfprivacy_api/actions/users.py new file mode 100644 index 0000000..bfc1756 --- /dev/null +++ b/selfprivacy_api/actions/users.py @@ -0,0 +1,219 @@ +"""Actions to manage the users.""" +import re +from typing import Optional +from pydantic import BaseModel +from enum import Enum +from selfprivacy_api.utils import ( + ReadUserData, + WriteUserData, + hash_password, + is_username_forbidden, +) + + +class UserDataUserOrigin(Enum): + """Origin of the user in the user data""" + + NORMAL = "NORMAL" + PRIMARY = "PRIMARY" + ROOT = "ROOT" + + +class UserDataUser(BaseModel): + """The user model from the userdata file""" + + username: str + ssh_keys: list[str] + origin: UserDataUserOrigin + + +def ensure_ssh_and_users_fields_exist(data): + if "ssh" not in data: + data["ssh"] = {} + data["ssh"]["rootKeys"] = [] + + elif data["ssh"].get("rootKeys") is None: + data["ssh"]["rootKeys"] = [] + + if "sshKeys" not in data: + data["sshKeys"] = [] + + if "users" not in data: + data["users"] = [] + + +def get_users( + exclude_primary: bool = False, + exclude_root: bool = False, +) -> list[UserDataUser]: + """Get the list of users""" + users = [] + with ReadUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + users = [ + UserDataUser( + username=user["username"], + ssh_keys=user.get("sshKeys", []), + origin=UserDataUserOrigin.NORMAL, + ) + for user in user_data["users"] + ] + if not exclude_primary: + users.append( + UserDataUser( + username=user_data["username"], + ssh_keys=user_data["sshKeys"], + origin=UserDataUserOrigin.PRIMARY, + ) + ) + if not exclude_root: + users.append( + UserDataUser( + username="root", + ssh_keys=user_data["ssh"]["rootKeys"], + origin=UserDataUserOrigin.ROOT, + ) + ) + return users + + +class UsernameForbidden(Exception): + """Attempted to create a user with a forbidden username""" + + pass + + +class UserAlreadyExists(Exception): + """Attempted to create a user that already exists""" + + pass + + +class UsernameNotAlphanumeric(Exception): + """Attempted to create a user with a non-alphanumeric username""" + + pass + + +class UsernameTooLong(Exception): + """Attempted to create a user with a too long username. Username must be less than 32 characters""" + + pass +
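# Illustrative sketch (editorial addition, not part of the original patch):
# how callers are expected to map the exception hierarchy above onto
# user-facing errors; create_user is defined further down in this module.
def example_create_user(username: str, password: str) -> str:
    try:
        create_user(username, password)
    except (UsernameForbidden, UsernameNotAlphanumeric, UsernameTooLong) as error:
        return f"invalid username: {error}"
    except UserAlreadyExists as error:
        return f"conflict: {error}"
    return "created"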
+ +class PasswordIsEmpty(Exception): + """Attempted to create a user with an empty password""" + + pass + + +def create_user(username: str, password: str): + if password == "": + raise PasswordIsEmpty("Password is empty") + + if is_username_forbidden(username): + raise UsernameForbidden("Username is forbidden") + + if not re.match(r"^[a-z_][a-z0-9_]+$", username): + raise UsernameNotAlphanumeric( + "Username must be alphanumeric and start with a letter" + ) + + if len(username) >= 32: + raise UsernameTooLong("Username must be less than 32 characters") + + with ReadUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + if username == user_data["username"]: + raise UserAlreadyExists("User already exists") + if username in [user["username"] for user in user_data["users"]]: + raise UserAlreadyExists("User already exists") + + hashed_password = hash_password(password) + + with WriteUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + + user_data["users"].append( + {"username": username, "sshKeys": [], "hashedPassword": hashed_password} + ) + + +class UserNotFound(Exception): + """Attempted to get a user that does not exist""" + + pass + + +class UserIsProtected(Exception): + """Attempted to delete a user that is protected""" + + pass + + +def delete_user(username: str): + with WriteUserData() as user_data: + ensure_ssh_and_users_fields_exist(user_data) + if username == user_data["username"] or username == "root": + raise UserIsProtected("Cannot delete main or root user") + + for data_user in user_data["users"]: + if data_user["username"] == username: + user_data["users"].remove(data_user) + break + else: + raise UserNotFound("User does not exist") + + +def update_user(username: str, password: str): + if password == "": + raise PasswordIsEmpty("Password is empty") + + hashed_password = hash_password(password) + + with WriteUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == data["username"]: + data["hashedMasterPassword"] = hashed_password + + # Return 404 if user does not exist + else: + for data_user in data["users"]: + if data_user["username"] == username: + data_user["hashedPassword"] = hashed_password + break + else: + raise UserNotFound("User does not exist") + + +def get_user_by_username(username: str) -> Optional[UserDataUser]: + with ReadUserData() as data: + ensure_ssh_and_users_fields_exist(data) + + if username == "root": + return UserDataUser( + origin=UserDataUserOrigin.ROOT, + username="root", + ssh_keys=data["ssh"]["rootKeys"], + ) + + if username == data["username"]: + return UserDataUser( + origin=UserDataUserOrigin.PRIMARY, + username=username, + ssh_keys=data["sshKeys"], + ) + + for user in data["users"]: + if user["username"] == username: + if "sshKeys" not in user: + user["sshKeys"] = [] + + return UserDataUser( + origin=UserDataUserOrigin.NORMAL, + username=username, + ssh_keys=user["sshKeys"], + ) + + return None diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 49e5bae..3436445 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -1,98 +1,56 @@ #!/usr/bin/env python3 """SelfPrivacy server management API""" -import os -from gevent import monkey +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from strawberry.fastapi import GraphQLRouter +import uvicorn -from flask import Flask, request, jsonify -from flask_restful import Api -from flask_swagger import swagger -from flask_swagger_ui import
get_swaggerui_blueprint - -from selfprivacy_api.resources.users import User, Users -from selfprivacy_api.resources.common import ApiVersion -from selfprivacy_api.resources.system import api_system -from selfprivacy_api.resources.services import services as api_services -from selfprivacy_api.resources.api_auth import auth as api_auth - -from selfprivacy_api.restic_controller.tasks import huey, init_restic - +from selfprivacy_api.dependencies import get_api_version +from selfprivacy_api.graphql.schema import schema from selfprivacy_api.migrations import run_migrations +from selfprivacy_api.restic_controller.tasks import init_restic -from selfprivacy_api.utils.auth import is_token_valid +from selfprivacy_api.rest import ( + system, + users, + api_auth, + services, +) -swagger_blueprint = get_swaggerui_blueprint( - "/api/docs", "/api/swagger.json", config={"app_name": "SelfPrivacy API"} +app = FastAPI() + +graphql_app = GraphQLRouter( + schema, +) + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], ) -def create_app(test_config=None): - """Initiate Flask app and bind routes""" - app = Flask(__name__) - api = Api(app) +app.include_router(system.router) +app.include_router(users.router) +app.include_router(api_auth.router) +app.include_router(services.router) +app.include_router(graphql_app, prefix="/graphql") - if test_config is None: - app.config["ENABLE_SWAGGER"] = os.environ.get("ENABLE_SWAGGER", "0") - app.config["B2_BUCKET"] = os.environ.get("B2_BUCKET") - else: - app.config.update(test_config) - # Check bearer token - @app.before_request - def check_auth(): - # Exclude swagger-ui, /auth/new_device/authorize, /auth/recovery_token/use - if request.path.startswith("/api"): - pass - elif request.path.startswith("/auth/new_device/authorize"): - pass - elif request.path.startswith("/auth/recovery_token/use"): - pass - else: - auth = request.headers.get("Authorization") - if auth is None: - return jsonify({"error": "Missing Authorization header"}), 401 - # Strip Bearer from auth header - auth = auth.replace("Bearer ", "") - if not is_token_valid(auth): - return jsonify({"error": "Invalid token"}), 401 +@app.get("/api/version") +async def get_version(): + """Get the version of the server""" + return {"version": get_api_version()} - api.add_resource(ApiVersion, "/api/version") - api.add_resource(Users, "/users") - api.add_resource(User, "/users/") - app.register_blueprint(api_system) - app.register_blueprint(api_services) - app.register_blueprint(api_auth) - - @app.route("/api/swagger.json") - def spec(): - if app.config["ENABLE_SWAGGER"] == "1": - swag = swagger(app) - swag["info"]["version"] = "1.2.7" - swag["info"]["title"] = "SelfPrivacy API" - swag["info"]["description"] = "SelfPrivacy API" - swag["securityDefinitions"] = { - "bearerAuth": { - "type": "apiKey", - "name": "Authorization", - "in": "header", - } - } - swag["security"] = [{"bearerAuth": []}] - - return jsonify(swag) - return jsonify({}), 404 - - if app.config["ENABLE_SWAGGER"] == "1": - app.register_blueprint(swagger_blueprint, url_prefix="/api/docs") - - return app +@app.on_event("startup") +async def startup(): + run_migrations() + init_restic() if __name__ == "__main__": - monkey.patch_all() - created_app = create_app() - run_migrations() - huey.start() - init_restic() - created_app.run(port=5050, debug=False) + uvicorn.run("selfprivacy_api.app:app", host="0.0.0.0", port=5050, log_level="info") diff --git a/selfprivacy_api/dependencies.py 
b/selfprivacy_api/dependencies.py new file mode 100644 index 0000000..109e2ce --- /dev/null +++ b/selfprivacy_api/dependencies.py @@ -0,0 +1,30 @@ +from fastapi import Depends, HTTPException, status +from fastapi.security import APIKeyHeader +from pydantic import BaseModel + +from selfprivacy_api.utils.auth import is_token_valid + + +class TokenHeader(BaseModel): + token: str + + +async def get_token_header( + token: str = Depends(APIKeyHeader(name="Authorization", auto_error=False)) +) -> TokenHeader: + if token is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Token not provided" + ) + else: + token = token.replace("Bearer ", "") + if not is_token_valid(token): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token" + ) + return TokenHeader(token=token) + + +def get_api_version() -> str: + """Get API version""" + return "2.0.0" diff --git a/selfprivacy_api/graphql/__init__.py b/selfprivacy_api/graphql/__init__.py new file mode 100644 index 0000000..7372197 --- /dev/null +++ b/selfprivacy_api/graphql/__init__.py @@ -0,0 +1,21 @@ +"""GraphQL API for SelfPrivacy.""" +# pylint: disable=too-few-public-methods +import typing +from strawberry.permission import BasePermission +from strawberry.types import Info + +from selfprivacy_api.utils.auth import is_token_valid + + +class IsAuthenticated(BasePermission): + """Is authenticated permission""" + + message = "You must be authenticated to access this resource." + + def has_permission(self, source: typing.Any, info: Info, **kwargs) -> bool: + token = info.context["request"].headers.get("Authorization") + if token is None: + token = info.context["request"].query_params.get("token") + if token is None: + return False + return is_token_valid(token.replace("Bearer ", "")) diff --git a/selfprivacy_api/graphql/common_types/__init__.py b/selfprivacy_api/graphql/common_types/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/graphql/common_types/dns.py b/selfprivacy_api/graphql/common_types/dns.py new file mode 100644 index 0000000..c9f8413 --- /dev/null +++ b/selfprivacy_api/graphql/common_types/dns.py @@ -0,0 +1,13 @@ +import typing +import strawberry + + +@strawberry.type +class DnsRecord: + """DNS record""" + + record_type: str + name: str + content: str + ttl: int + priority: typing.Optional[int] diff --git a/selfprivacy_api/graphql/common_types/jobs.py b/selfprivacy_api/graphql/common_types/jobs.py new file mode 100644 index 0000000..4b095c8 --- /dev/null +++ b/selfprivacy_api/graphql/common_types/jobs.py @@ -0,0 +1,49 @@ +"""Jobs status""" +# pylint: disable=too-few-public-methods +import datetime +import typing +import strawberry + +from selfprivacy_api.jobs import Job, Jobs + + +@strawberry.type +class ApiJob: + """Job type for GraphQL.""" + + uid: str + name: str + description: str + status: str + status_text: typing.Optional[str] + progress: typing.Optional[int] + created_at: datetime.datetime + updated_at: datetime.datetime + finished_at: typing.Optional[datetime.datetime] + error: typing.Optional[str] + result: typing.Optional[str] + + +def job_to_api_job(job: Job) -> ApiJob: + """Convert a Job from jobs controller to a GraphQL ApiJob.""" + return ApiJob( + uid=str(job.uid), + name=job.name, + description=job.description, + status=job.status.name, + status_text=job.status_text, + progress=job.progress, + created_at=job.created_at, + updated_at=job.updated_at, + finished_at=job.finished_at, + error=job.error, + result=job.result, + ) + + +def 
get_api_job_by_id(job_id: str) -> typing.Optional[ApiJob]: + """Get a job for GraphQL by its ID.""" + job = Jobs.get_instance().get_job(job_id) + if job is None: + return None + return job_to_api_job(job) diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py new file mode 100644 index 0000000..c1246ca --- /dev/null +++ b/selfprivacy_api/graphql/common_types/service.py @@ -0,0 +1,146 @@ +from enum import Enum +import typing +import strawberry +from selfprivacy_api.graphql.common_types.dns import DnsRecord + +from selfprivacy_api.services import get_service_by_id, get_services_by_location +from selfprivacy_api.services import Service as ServiceInterface +from selfprivacy_api.utils.block_devices import BlockDevices + + +def get_usages(root: "StorageVolume") -> list["StorageUsageInterface"]: + """Get usages of a volume""" + return [ + ServiceStorageUsage( + service=service_to_graphql_service(service), + title=service.get_display_name(), + used_space=str(service.get_storage_usage()), + volume=get_volume_by_id(service.get_location()), + ) + for service in get_services_by_location(root.name) + ] + + +@strawberry.type +class StorageVolume: + """Stats and basic info about a volume or a system disk.""" + + total_space: str + free_space: str + used_space: str + root: bool + name: str + model: typing.Optional[str] + serial: typing.Optional[str] + type: str + + @strawberry.field + def usages(self) -> list["StorageUsageInterface"]: + """Get usages of a volume""" + return get_usages(self) + + +@strawberry.interface +class StorageUsageInterface: + used_space: str + volume: typing.Optional[StorageVolume] + title: str + + +@strawberry.type +class ServiceStorageUsage(StorageUsageInterface): + """Storage usage for a service""" + + service: typing.Optional["Service"] + + +@strawberry.enum +class ServiceStatusEnum(Enum): + ACTIVE = "ACTIVE" + RELOADING = "RELOADING" + INACTIVE = "INACTIVE" + FAILED = "FAILED" + ACTIVATING = "ACTIVATING" + DEACTIVATING = "DEACTIVATING" + OFF = "OFF" + + +def get_storage_usage(root: "Service") -> ServiceStorageUsage: + """Get storage usage for a service""" + service = get_service_by_id(root.id) + if service is None: + return ServiceStorageUsage( + service=service, + title="Not found", + used_space="0", + volume=get_volume_by_id("sda1"), + ) + return ServiceStorageUsage( + service=service_to_graphql_service(service), + title=service.get_display_name(), + used_space=str(service.get_storage_usage()), + volume=get_volume_by_id(service.get_location()), + ) + + +@strawberry.type +class Service: + id: str + display_name: str + description: str + svg_icon: str + is_movable: bool + is_required: bool + is_enabled: bool + status: ServiceStatusEnum + url: typing.Optional[str] + dns_records: typing.Optional[typing.List[DnsRecord]] + + @strawberry.field + def storage_usage(self) -> ServiceStorageUsage: + """Get storage usage for a service""" + return get_storage_usage(self) + + +def service_to_graphql_service(service: ServiceInterface) -> Service: + """Convert service to graphql service""" + return Service( + id=service.get_id(), + display_name=service.get_display_name(), + description=service.get_description(), + svg_icon=service.get_svg_icon(), + is_movable=service.is_movable(), + is_required=service.is_required(), + is_enabled=service.is_enabled(), + status=ServiceStatusEnum(service.get_status().value), + url=service.get_url(), + dns_records=[ + DnsRecord( + record_type=record.type, + name=record.name, + content=record.content, + 
ttl=record.ttl, + priority=record.priority, + ) + for record in service.get_dns_records() + ], + ) + + +def get_volume_by_id(volume_id: str) -> typing.Optional[StorageVolume]: + """Get volume by id""" + volume = BlockDevices().get_block_device(volume_id) + if volume is None: + return None + return StorageVolume( + total_space=str(volume.fssize) + if volume.fssize is not None + else str(volume.size), + free_space=str(volume.fsavail), + used_space=str(volume.fsused), + root=volume.name == "sda1", + name=volume.name, + model=volume.model, + serial=volume.serial, + type=volume.type, + ) diff --git a/selfprivacy_api/graphql/common_types/user.py b/selfprivacy_api/graphql/common_types/user.py new file mode 100644 index 0000000..26ad6f2 --- /dev/null +++ b/selfprivacy_api/graphql/common_types/user.py @@ -0,0 +1,57 @@ +import typing +from enum import Enum +import strawberry +import selfprivacy_api.actions.users as users_actions + +from selfprivacy_api.graphql.mutations.mutation_interface import ( + MutationReturnInterface, +) + + +@strawberry.enum +class UserType(Enum): + NORMAL = "NORMAL" + PRIMARY = "PRIMARY" + ROOT = "ROOT" + + +@strawberry.type +class User: + + user_type: UserType + username: str + # userHomeFolderspace: UserHomeFolderUsage + ssh_keys: typing.List[str] = strawberry.field(default_factory=list) + + +@strawberry.type +class UserMutationReturn(MutationReturnInterface): + """Return type for user mutation""" + + user: typing.Optional[User] = None + + +def get_user_by_username(username: str) -> typing.Optional[User]: + + user = users_actions.get_user_by_username(username) + if user is None: + return None + + return User( + user_type=UserType(user.origin.value), + username=user.username, + ssh_keys=user.ssh_keys, + ) + + +def get_users() -> typing.List[User]: + """Get users""" + users = users_actions.get_users(exclude_root=True) + return [ + User( + user_type=UserType(user.origin.value), + username=user.username, + ssh_keys=user.ssh_keys, + ) + for user in users + ] diff --git a/selfprivacy_api/graphql/mutations/__init__.py b/selfprivacy_api/graphql/mutations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/graphql/mutations/api_mutations.py b/selfprivacy_api/graphql/mutations/api_mutations.py new file mode 100644 index 0000000..c6727db --- /dev/null +++ b/selfprivacy_api/graphql/mutations/api_mutations.py @@ -0,0 +1,219 @@ +"""API access mutations""" +# pylint: disable=too-few-public-methods +import datetime +import typing +import strawberry +from strawberry.types import Info +from selfprivacy_api.actions.api_tokens import ( + CannotDeleteCallerException, + InvalidExpirationDate, + InvalidUsesLeft, + NotFoundException, + delete_api_token, + get_new_api_recovery_key, +) +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericMutationReturn, + MutationReturnInterface, +) + +from selfprivacy_api.utils.auth import ( + delete_new_device_auth_token, + get_new_device_auth_token, + refresh_token, + use_mnemonic_recoverery_token, + use_new_device_auth_token, +) + + +@strawberry.type +class ApiKeyMutationReturn(MutationReturnInterface): + key: typing.Optional[str] + + +@strawberry.type +class DeviceApiTokenMutationReturn(MutationReturnInterface): + token: typing.Optional[str] + + +@strawberry.input +class RecoveryKeyLimitsInput: + """Recovery key limits input""" + + expiration_date: typing.Optional[datetime.datetime] = None + uses: typing.Optional[int] = None + + +@strawberry.input 
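# For reference (editorial note, not part of the original patch): a client
# consumes the input type below through the use_recovery_api_key mutation
# defined later in this file; with strawberry's default camel-casing the
# GraphQL document looks roughly like:
#
#   mutation {
#     useRecoveryApiKey(input: {key: "<mnemonic>", deviceName: "laptop"}) {
#       success message code token
#     }
#   }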
+class UseRecoveryKeyInput: + """Use recovery key input""" + + key: str + deviceName: str + + +@strawberry.input +class UseNewDeviceKeyInput: + """Use new device key input""" + + key: str + deviceName: str + + +@strawberry.type +class ApiMutations: + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def get_new_recovery_api_key( + self, limits: typing.Optional[RecoveryKeyLimitsInput] = None + ) -> ApiKeyMutationReturn: + """Generate recovery key""" + if limits is None: + limits = RecoveryKeyLimitsInput() + try: + key = get_new_api_recovery_key(limits.expiration_date, limits.uses) + except InvalidExpirationDate: + return ApiKeyMutationReturn( + success=False, + message="Expiration date must be in the future", + code=400, + key=None, + ) + except InvalidUsesLeft: + return ApiKeyMutationReturn( + success=False, + message="Uses must be greater than 0", + code=400, + key=None, + ) + return ApiKeyMutationReturn( + success=True, + message="Recovery key generated", + code=200, + key=key, + ) + + @strawberry.mutation() + def use_recovery_api_key( + self, input: UseRecoveryKeyInput + ) -> DeviceApiTokenMutationReturn: + """Use recovery key""" + token = use_mnemonic_recoverery_token(input.key, input.deviceName) + if token is None: + return DeviceApiTokenMutationReturn( + success=False, + message="Recovery key not found", + code=404, + token=None, + ) + return DeviceApiTokenMutationReturn( + success=True, + message="Recovery key used", + code=200, + token=token, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def refresh_device_api_token(self, info: Info) -> DeviceApiTokenMutationReturn: + """Refresh device api token""" + token = ( + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") + ) + if token is None: + return DeviceApiTokenMutationReturn( + success=False, + message="Token not found", + code=404, + token=None, + ) + new_token = refresh_token(token) + if new_token is None: + return DeviceApiTokenMutationReturn( + success=False, + message="Token not found", + code=404, + token=None, + ) + return DeviceApiTokenMutationReturn( + success=True, + message="Token refreshed", + code=200, + token=new_token, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def delete_device_api_token(self, device: str, info: Info) -> GenericMutationReturn: + """Delete device api token""" + self_token = ( + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") + ) + try: + delete_api_token(self_token, device) + except NotFoundException: + return GenericMutationReturn( + success=False, + message="Token not found", + code=404, + ) + except CannotDeleteCallerException: + return GenericMutationReturn( + success=False, + message="Cannot delete caller token", + code=400, + ) + except Exception as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=500, + ) + return GenericMutationReturn( + success=True, + message="Token deleted", + code=200, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def get_new_device_api_key(self) -> ApiKeyMutationReturn: + """Generate device api key""" + key = get_new_device_auth_token() + return ApiKeyMutationReturn( + success=True, + message="Device api key generated", + code=200, + key=key, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def invalidate_new_device_api_key(self) -> GenericMutationReturn: + """Invalidate new device api key""" + delete_new_device_auth_token() + return GenericMutationReturn( + success=True, 
+ message="New device key deleted", + code=200, + ) + + @strawberry.mutation() + def authorize_with_new_device_api_key( + self, input: UseNewDeviceKeyInput + ) -> DeviceApiTokenMutationReturn: + """Authorize with new device api key""" + token = use_new_device_auth_token(input.key, input.deviceName) + if token is None: + return DeviceApiTokenMutationReturn( + success=False, + message="Token not found", + code=404, + token=None, + ) + return DeviceApiTokenMutationReturn( + success=True, + message="Token used", + code=200, + token=token, + ) diff --git a/selfprivacy_api/graphql/mutations/job_mutations.py b/selfprivacy_api/graphql/mutations/job_mutations.py new file mode 100644 index 0000000..d3a3498 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/job_mutations.py @@ -0,0 +1,27 @@ +"""Manipulate jobs""" +# pylint: disable=too-few-public-methods +import strawberry + +from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn +from selfprivacy_api.jobs import Jobs + + +@strawberry.type +class JobMutations: + """Mutations related to jobs""" + + @strawberry.mutation + def remove_job(self, job_id: str) -> GenericMutationReturn: + """Remove a job from the queue""" + result = Jobs().remove_by_uuid(job_id) + if result: + return GenericMutationReturn( + success=True, + code=200, + message="Job removed", + ) + return GenericMutationReturn( + success=False, + code=404, + message="Job not found", + ) diff --git a/selfprivacy_api/graphql/mutations/mutation_interface.py b/selfprivacy_api/graphql/mutations/mutation_interface.py new file mode 100644 index 0000000..33a6b02 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/mutation_interface.py @@ -0,0 +1,21 @@ +import strawberry +import typing + +from selfprivacy_api.graphql.common_types.jobs import ApiJob + + +@strawberry.interface +class MutationReturnInterface: + success: bool + message: str + code: int + + +@strawberry.type +class GenericMutationReturn(MutationReturnInterface): + pass + + +@strawberry.type +class GenericJobButationReturn(MutationReturnInterface): + job: typing.Optional[ApiJob] = None diff --git a/selfprivacy_api/graphql/mutations/services_mutations.py b/selfprivacy_api/graphql/mutations/services_mutations.py new file mode 100644 index 0000000..38a0d7f --- /dev/null +++ b/selfprivacy_api/graphql/mutations/services_mutations.py @@ -0,0 +1,169 @@ +"""Services mutations""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.common_types.jobs import job_to_api_job + +from selfprivacy_api.graphql.common_types.service import ( + Service, + service_to_graphql_service, +) +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericJobButationReturn, + GenericMutationReturn, +) + +from selfprivacy_api.services import get_service_by_id +from selfprivacy_api.utils.block_devices import BlockDevices + + +@strawberry.type +class ServiceMutationReturn(GenericMutationReturn): + """Service mutation return type.""" + + service: typing.Optional[Service] = None + + +@strawberry.input +class MoveServiceInput: + """Move service input type.""" + + service_id: str + location: str + + +@strawberry.type +class ServiceJobMutationReturn(GenericJobButationReturn): + """Service job mutation return type.""" + + service: typing.Optional[Service] = None + + +@strawberry.type +class ServicesMutations: + """Services mutations.""" + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def 
enable_service(self, service_id: str) -> ServiceMutationReturn: + """Enable service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.enable() + return ServiceMutationReturn( + success=True, + message="Service enabled.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def disable_service(self, service_id: str) -> ServiceMutationReturn: + """Disable service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.disable() + return ServiceMutationReturn( + success=True, + message="Service disabled.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def stop_service(self, service_id: str) -> ServiceMutationReturn: + """Stop service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.stop() + return ServiceMutationReturn( + success=True, + message="Service stopped.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def start_service(self, service_id: str) -> ServiceMutationReturn: + """Start service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.start() + return ServiceMutationReturn( + success=True, + message="Service started.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def restart_service(self, service_id: str) -> ServiceMutationReturn: + """Restart service.""" + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.restart() + return ServiceMutationReturn( + success=True, + message="Service restarted.", + code=200, + service=service_to_graphql_service(service), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def move_service(self, input: MoveServiceInput) -> ServiceJobMutationReturn: + """Move service.""" + service = get_service_by_id(input.service_id) + if service is None: + return ServiceJobMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + if not service.is_movable(): + return ServiceJobMutationReturn( + success=False, + message="Service is not movable.", + code=400, + service=service_to_graphql_service(service), + ) + volume = BlockDevices().get_block_device(input.location) + if volume is None: + return ServiceJobMutationReturn( + success=False, + message="Volume not found.", + code=404, + service=service_to_graphql_service(service), + ) + job = service.move_to_volume(volume) + return ServiceJobMutationReturn( + success=True, + message="Service moved.", + code=200, + service=service_to_graphql_service(service), + job=job_to_api_job(job), + ) diff --git a/selfprivacy_api/graphql/mutations/ssh_mutations.py b/selfprivacy_api/graphql/mutations/ssh_mutations.py new file mode 100644 index 0000000..60f81a8 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/ssh_mutations.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python3 
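# Editorial sketch (not part of the original patch, kept in comment form):
# once the FastAPI app from selfprivacy_api.app is assembled, mutations like
# the ones in this package can be exercised over HTTP, for example:
#
#   from fastapi.testclient import TestClient
#   from selfprivacy_api.app import app
#
#   client = TestClient(app)
#   response = client.post(
#       "/graphql",
#       json={"query": "mutation { rebootSystem { success message code } }"},
#       headers={"Authorization": "Bearer <token>"},
#   )
#   assert response.json()["data"]["rebootSystem"]["success"]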
+"""Users management module""" +# pylint: disable=too-few-public-methods + +import strawberry +from selfprivacy_api.actions.users import UserNotFound + +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.actions.ssh import ( + InvalidPublicKey, + KeyAlreadyExists, + KeyNotFound, + create_ssh_key, + remove_ssh_key, +) +from selfprivacy_api.graphql.common_types.user import ( + UserMutationReturn, + get_user_by_username, +) + + +@strawberry.input +class SshMutationInput: + """Input type for ssh mutation""" + + username: str + ssh_key: str + + +@strawberry.type +class SshMutations: + """Mutations ssh""" + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def add_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: + """Add a new ssh key""" + + try: + create_ssh_key(ssh_input.username, ssh_input.ssh_key) + except KeyAlreadyExists: + return UserMutationReturn( + success=False, + message="Key already exists", + code=409, + ) + except InvalidPublicKey: + return UserMutationReturn( + success=False, + message="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", + code=400, + ) + except UserNotFound: + return UserMutationReturn( + success=False, + message="User not found", + code=404, + ) + except Exception as e: + return UserMutationReturn( + success=False, + message=str(e), + code=500, + ) + + return UserMutationReturn( + success=True, + message="New SSH key successfully written", + code=201, + user=get_user_by_username(ssh_input.username), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def remove_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn: + """Remove ssh key from user""" + + try: + remove_ssh_key(ssh_input.username, ssh_input.ssh_key) + except KeyNotFound: + return UserMutationReturn( + success=False, + message="Key not found", + code=404, + ) + except UserNotFound: + return UserMutationReturn( + success=False, + message="User not found", + code=404, + ) + except Exception as e: + return UserMutationReturn( + success=False, + message=str(e), + code=500, + ) + + return UserMutationReturn( + success=True, + message="SSH key successfully removed", + code=200, + user=get_user_by_username(ssh_input.username), + ) diff --git a/selfprivacy_api/graphql/mutations/storage_mutations.py b/selfprivacy_api/graphql/mutations/storage_mutations.py new file mode 100644 index 0000000..1b6d74e --- /dev/null +++ b/selfprivacy_api/graphql/mutations/storage_mutations.py @@ -0,0 +1,102 @@ +"""Storage devices mutations""" +import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.common_types.jobs import job_to_api_job +from selfprivacy_api.utils.block_devices import BlockDevices +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericJobButationReturn, + GenericMutationReturn, +) +from selfprivacy_api.jobs.migrate_to_binds import ( + BindMigrationConfig, + is_bind_migrated, + start_bind_migration, +) + + +@strawberry.input +class MigrateToBindsInput: + """Migrate to binds input""" + + email_block_device: str + bitwarden_block_device: str + gitea_block_device: str + nextcloud_block_device: str + pleroma_block_device: str + + +@strawberry.type +class StorageMutations: + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def resize_volume(self, name: str) -> GenericMutationReturn: + """Resize volume""" + volume = BlockDevices().get_block_device(name) + if volume is None: + return GenericMutationReturn( + success=False, code=404, message="Volume not 
found" + ) + volume.resize() + return GenericMutationReturn( + success=True, code=200, message="Volume resize started" + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def mount_volume(self, name: str) -> GenericMutationReturn: + """Mount volume""" + volume = BlockDevices().get_block_device(name) + if volume is None: + return GenericMutationReturn( + success=False, code=404, message="Volume not found" + ) + is_success = volume.mount() + if is_success: + return GenericMutationReturn( + success=True, + code=200, + message="Volume mounted, rebuild the system to apply changes", + ) + return GenericMutationReturn( + success=False, code=409, message="Volume not mounted (already mounted?)" + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def unmount_volume(self, name: str) -> GenericMutationReturn: + """Unmount volume""" + volume = BlockDevices().get_block_device(name) + if volume is None: + return GenericMutationReturn( + success=False, code=404, message="Volume not found" + ) + is_success = volume.unmount() + if is_success: + return GenericMutationReturn( + success=True, + code=200, + message="Volume unmounted, rebuild the system to apply changes", + ) + return GenericMutationReturn( + success=False, code=409, message="Volume not unmounted (already unmounted?)" + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def migrate_to_binds(self, input: MigrateToBindsInput) -> GenericJobButationReturn: + """Migrate to binds""" + if is_bind_migrated(): + return GenericJobButationReturn( + success=False, code=409, message="Already migrated to binds" + ) + job = start_bind_migration( + BindMigrationConfig( + email_block_device=input.email_block_device, + bitwarden_block_device=input.bitwarden_block_device, + gitea_block_device=input.gitea_block_device, + nextcloud_block_device=input.nextcloud_block_device, + pleroma_block_device=input.pleroma_block_device, + ) + ) + return GenericJobButationReturn( + success=True, + code=200, + message="Migration to binds started, rebuild the system to apply changes", + job=job_to_api_job(job), + ) diff --git a/selfprivacy_api/graphql/mutations/system_mutations.py b/selfprivacy_api/graphql/mutations/system_mutations.py new file mode 100644 index 0000000..daada17 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/system_mutations.py @@ -0,0 +1,128 @@ +"""System management mutations""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericMutationReturn, + MutationReturnInterface, +) + +import selfprivacy_api.actions.system as system_actions + + +@strawberry.type +class TimezoneMutationReturn(MutationReturnInterface): + """Return type of the timezone mutation, contains timezone""" + + timezone: typing.Optional[str] + + +@strawberry.type +class AutoUpgradeSettingsMutationReturn(MutationReturnInterface): + """Return type autoUpgrade Settings""" + + enableAutoUpgrade: bool + allowReboot: bool + + +@strawberry.input +class AutoUpgradeSettingsInput: + """Input type for auto upgrade settings""" + + enableAutoUpgrade: typing.Optional[bool] = None + allowReboot: typing.Optional[bool] = None + + +@strawberry.type +class SystemMutations: + """Mutations related to system settings""" + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def change_timezone(self, timezone: str) -> TimezoneMutationReturn: + """Change the timezone of the server. 
Timezone is a tzdatabase name.""" + try: + system_actions.change_timezone(timezone) + except system_actions.InvalidTimezone as e: + return TimezoneMutationReturn( + success=False, + message=str(e), + code=400, + timezone=None, + ) + return TimezoneMutationReturn( + success=True, + message="Timezone changed", + code=200, + timezone=timezone, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def change_auto_upgrade_settings( + self, settings: AutoUpgradeSettingsInput + ) -> AutoUpgradeSettingsMutationReturn: + """Change auto upgrade settings of the server.""" + system_actions.set_auto_upgrade_settings( + settings.enableAutoUpgrade, settings.allowReboot + ) + + new_settings = system_actions.get_auto_upgrade_settings() + + return AutoUpgradeSettingsMutationReturn( + success=True, + message="Auto-upgrade settings changed", + code=200, + enableAutoUpgrade=new_settings.enable, + allowReboot=new_settings.allowReboot, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def run_system_rebuild(self) -> GenericMutationReturn: + system_actions.rebuild_system() + return GenericMutationReturn( + success=True, + message="Starting system rebuild", + code=200, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def run_system_rollback(self) -> GenericMutationReturn: + system_actions.rollback_system() + return GenericMutationReturn( + success=True, + message="Starting system rollback", + code=200, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def run_system_upgrade(self) -> GenericMutationReturn: + system_actions.upgrade_system() + return GenericMutationReturn( + success=True, + message="Starting system upgrade", + code=200, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def reboot_system(self) -> GenericMutationReturn: + system_actions.reboot_system() + return GenericMutationReturn( + success=True, + message="System reboot has started", + code=200, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def pull_repository_changes(self) -> GenericMutationReturn: + result = system_actions.pull_repository_changes() + if result.status == 0: + return GenericMutationReturn( + success=True, + message="Repository changes pulled", + code=200, + ) + return GenericMutationReturn( + success=False, + message=f"Failed to pull repository changes:\n{result.data}", + code=500, + ) diff --git a/selfprivacy_api/graphql/mutations/users_mutations.py b/selfprivacy_api/graphql/mutations/users_mutations.py new file mode 100644 index 0000000..27be1d7 --- /dev/null +++ b/selfprivacy_api/graphql/mutations/users_mutations.py @@ -0,0 +1,117 @@ +#!/usr/bin/env python3 +"""Users management module""" +# pylint: disable=too-few-public-methods +import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.common_types.user import ( + UserMutationReturn, + get_user_by_username, +) +from selfprivacy_api.graphql.mutations.mutation_interface import ( + GenericMutationReturn, +) +import selfprivacy_api.actions.users as users_actions + + +@strawberry.input +class UserMutationInput: + """Input type for user mutation""" + + username: str + password: str + + +@strawberry.type +class UserMutations: + """Mutations to change user settings""" + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def create_user(self, user: UserMutationInput) -> UserMutationReturn: + try: + users_actions.create_user(user.username, user.password) + except users_actions.PasswordIsEmpty as e: + return
UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UsernameForbidden as e: + return UserMutationReturn( + success=False, + message=str(e), + code=409, + ) + except users_actions.UsernameNotAlphanumeric as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UsernameTooLong as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UserAlreadyExists as e: + return UserMutationReturn( + success=False, + message=str(e), + code=409, + user=get_user_by_username(user.username), + ) + + return UserMutationReturn( + success=True, + message="User created", + code=201, + user=get_user_by_username(user.username), + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def delete_user(self, username: str) -> GenericMutationReturn: + try: + users_actions.delete_user(username) + except users_actions.UserNotFound as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=404, + ) + except users_actions.UserIsProtected as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=400, + ) + + return GenericMutationReturn( + success=True, + message="User deleted", + code=200, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def update_user(self, user: UserMutationInput) -> UserMutationReturn: + """Update user mutation""" + try: + users_actions.update_user(user.username, user.password) + except users_actions.PasswordIsEmpty as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) + except users_actions.UserNotFound as e: + return UserMutationReturn( + success=False, + message=str(e), + code=404, + ) + + return UserMutationReturn( + success=True, + message="User updated", + code=200, + user=get_user_by_username(user.username), + ) diff --git a/selfprivacy_api/graphql/queries/__init__.py b/selfprivacy_api/graphql/queries/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/graphql/queries/api_queries.py b/selfprivacy_api/graphql/queries/api_queries.py new file mode 100644 index 0000000..7994a8f --- /dev/null +++ b/selfprivacy_api/graphql/queries/api_queries.py @@ -0,0 +1,97 @@ +"""API access status""" +# pylint: disable=too-few-public-methods +import datetime +import typing +import strawberry +from strawberry.types import Info +from selfprivacy_api.actions.api_tokens import get_api_tokens_with_caller_flag +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.utils import parse_date +from selfprivacy_api.dependencies import get_api_version as get_api_version_dependency + +from selfprivacy_api.utils.auth import ( + get_recovery_token_status, + is_recovery_token_exists, + is_recovery_token_valid, +) + + +def get_api_version() -> str: + """Get API version""" + return get_api_version_dependency() + + +@strawberry.type +class ApiDevice: + """A single device with SelfPrivacy app installed""" + + name: str + creation_date: datetime.datetime + is_caller: bool + + +@strawberry.type +class ApiRecoveryKeyStatus: + """Recovery key status""" + + exists: bool + valid: bool + creation_date: typing.Optional[datetime.datetime] + expiration_date: typing.Optional[datetime.datetime] + uses_left: typing.Optional[int] + + +def get_recovery_key_status() -> ApiRecoveryKeyStatus: + """Get recovery key status""" + if not is_recovery_token_exists(): + return ApiRecoveryKeyStatus( + exists=False, + valid=False, + creation_date=None, + 
expiration_date=None, + uses_left=None, + ) + status = get_recovery_token_status() + if status is None: + return ApiRecoveryKeyStatus( + exists=False, + valid=False, + creation_date=None, + expiration_date=None, + uses_left=None, + ) + return ApiRecoveryKeyStatus( + exists=True, + valid=is_recovery_token_valid(), + creation_date=parse_date(status["date"]), + expiration_date=parse_date(status["expiration"]) + if status["expiration"] is not None + else None, + uses_left=status["uses_left"], + ) + + +@strawberry.type +class Api: + """API access status""" + + version: str = strawberry.field(resolver=get_api_version) + + @strawberry.field(permission_classes=[IsAuthenticated]) + def devices(self, info: Info) -> typing.List[ApiDevice]: + """List all devices with active tokens""" + return [ + ApiDevice( + name=device.name, + creation_date=device.date, + is_caller=device.is_caller, + ) + for device in get_api_tokens_with_caller_flag( + info.context["request"] + .headers.get("Authorization", "") + .replace("Bearer ", "") + ) + ] + + recovery_key: ApiRecoveryKeyStatus = strawberry.field( + resolver=get_recovery_key_status, permission_classes=[IsAuthenticated] + ) diff --git a/selfprivacy_api/graphql/queries/common.py b/selfprivacy_api/graphql/queries/common.py new file mode 100644 index 0000000..a1abbdc --- /dev/null +++ b/selfprivacy_api/graphql/queries/common.py @@ -0,0 +1,30 @@ +"""Common types and enums used by different types of queries.""" +from enum import Enum +import datetime +import typing +import strawberry + + +@strawberry.enum +class Severity(Enum): + """ + Severity of an alert. + """ + + INFO = "INFO" + WARNING = "WARNING" + ERROR = "ERROR" + CRITICAL = "CRITICAL" + SUCCESS = "SUCCESS" + + +@strawberry.type +class Alert: + """ + Alert type. + """ + + severity: Severity + title: str + message: str + timestamp: typing.Optional[datetime.datetime] diff --git a/selfprivacy_api/graphql/queries/jobs.py b/selfprivacy_api/graphql/queries/jobs.py new file mode 100644 index 0000000..426c563 --- /dev/null +++ b/selfprivacy_api/graphql/queries/jobs.py @@ -0,0 +1,25 @@ +"""Jobs status""" +# pylint: disable=too-few-public-methods +import typing +import strawberry +from selfprivacy_api.graphql.common_types.jobs import ( + ApiJob, + get_api_job_by_id, + job_to_api_job, +) + +from selfprivacy_api.jobs import Jobs + + +@strawberry.type +class Job: + """Job queries""" + + @strawberry.field + def get_jobs(self) -> typing.List[ApiJob]: + """Get the list of all jobs""" + return [job_to_api_job(job) for job in Jobs.get_instance().get_jobs()] + + @strawberry.field + def get_job(self, job_id: str) -> typing.Optional[ApiJob]: + """Get a job by its id""" + return get_api_job_by_id(job_id) diff --git a/selfprivacy_api/graphql/queries/providers.py b/selfprivacy_api/graphql/queries/providers.py new file mode 100644 index 0000000..6d0381e --- /dev/null +++ b/selfprivacy_api/graphql/queries/providers.py @@ -0,0 +1,13 @@ +"""Enums representing different service providers.""" +from enum import Enum +import strawberry + + +@strawberry.enum +class DnsProvider(Enum): + CLOUDFLARE = "CLOUDFLARE" + + +@strawberry.enum +class ServerProvider(Enum): + HETZNER = "HETZNER" diff --git a/selfprivacy_api/graphql/queries/services.py b/selfprivacy_api/graphql/queries/services.py new file mode 100644 index 0000000..5398f81 --- /dev/null +++ b/selfprivacy_api/graphql/queries/services.py @@ -0,0 +1,18 @@ +"""Services status""" +# pylint: disable=too-few-public-methods +import typing +import strawberry + +from selfprivacy_api.graphql.common_types.service import ( + Service, + 
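# maps the internal Service model onto its GraphQL representation +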
service_to_graphql_service, +) +from selfprivacy_api.services import get_all_services + + +@strawberry.type +class Services: + @strawberry.field + def all_services(self) -> typing.List[Service]: + services = get_all_services() + return [service_to_graphql_service(service) for service in services] diff --git a/selfprivacy_api/graphql/queries/storage.py b/selfprivacy_api/graphql/queries/storage.py new file mode 100644 index 0000000..6800518 --- /dev/null +++ b/selfprivacy_api/graphql/queries/storage.py @@ -0,0 +1,33 @@ +"""Storage queries.""" +# pylint: disable=too-few-public-methods +import typing +import strawberry + +from selfprivacy_api.graphql.common_types.service import ( + StorageVolume, +) +from selfprivacy_api.utils.block_devices import BlockDevices + + +@strawberry.type +class Storage: + """GraphQL queries to get storage information.""" + + @strawberry.field + def volumes(self) -> typing.List[StorageVolume]: + """Get list of volumes""" + return [ + StorageVolume( + total_space=str(volume.fssize) + if volume.fssize is not None + else str(volume.size), + free_space=str(volume.fsavail), + used_space=str(volume.fsused), + root=volume.name == "sda1", + name=volume.name, + model=volume.model, + serial=volume.serial, + type=volume.type, + ) + for volume in BlockDevices().get_block_devices() + ] diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py new file mode 100644 index 0000000..0e2a7ec --- /dev/null +++ b/selfprivacy_api/graphql/queries/system.py @@ -0,0 +1,166 @@ +"""Common system information and settings""" +# pylint: disable=too-few-public-methods +import os +import typing +import strawberry +from selfprivacy_api.graphql.common_types.dns import DnsRecord + +from selfprivacy_api.graphql.queries.common import Alert, Severity +from selfprivacy_api.graphql.queries.providers import DnsProvider, ServerProvider +from selfprivacy_api.jobs import Jobs +from selfprivacy_api.jobs.migrate_to_binds import is_bind_migrated +from selfprivacy_api.services import get_all_required_dns_records +from selfprivacy_api.utils import ReadUserData +import selfprivacy_api.actions.system as system_actions +import selfprivacy_api.actions.ssh as ssh_actions + + +@strawberry.type +class SystemDomainInfo: + """Information about the system domain""" + + domain: str + hostname: str + provider: DnsProvider + + @strawberry.field + def required_dns_records(self) -> typing.List[DnsRecord]: + """Collect all required DNS records for all services""" + return [ + DnsRecord( + record_type=record.type, + name=record.name, + content=record.content, + ttl=record.ttl, + priority=record.priority, + ) + for record in get_all_required_dns_records() + ] + + +def get_system_domain_info() -> SystemDomainInfo: + """Get basic system domain info""" + with ReadUserData() as user_data: + return SystemDomainInfo( + domain=user_data["domain"], + hostname=user_data["hostname"], + provider=DnsProvider.CLOUDFLARE, + ) + + +@strawberry.type +class AutoUpgradeOptions: + """Automatic upgrade options""" + + enable: bool + allow_reboot: bool + + +def get_auto_upgrade_options() -> AutoUpgradeOptions: + """Get automatic upgrade options""" + settings = system_actions.get_auto_upgrade_settings() + return AutoUpgradeOptions( + enable=settings.enable, + allow_reboot=settings.allowReboot, + ) + + +@strawberry.type +class SshSettings: + """SSH settings and root SSH keys""" + + enable: bool + password_authentication: bool + root_ssh_keys: typing.List[str] + + +def get_ssh_settings() -> SshSettings: + """Get SSH 
settings""" + settings = ssh_actions.get_ssh_settings() + return SshSettings( + enable=settings.enable, + password_authentication=settings.passwordAuthentication, + root_ssh_keys=settings.rootKeys, + ) + + +def get_system_timezone() -> str: + """Get system timezone""" + return system_actions.get_timezone() + + +@strawberry.type +class SystemSettings: + """Common system settings""" + + auto_upgrade: AutoUpgradeOptions = strawberry.field( + resolver=get_auto_upgrade_options + ) + ssh: SshSettings = strawberry.field(resolver=get_ssh_settings) + timezone: str = strawberry.field(resolver=get_system_timezone) + + +def get_system_version() -> str: + """Get system version""" + return system_actions.get_system_version() + + +def get_python_version() -> str: + """Get Python version""" + return system_actions.get_python_version() + + +@strawberry.type +class SystemInfo: + """System components versions""" + + system_version: str = strawberry.field(resolver=get_system_version) + python_version: str = strawberry.field(resolver=get_python_version) + + @strawberry.field + def using_binds(self) -> bool: + """Check if the system is using BINDs""" + return is_bind_migrated() + + +@strawberry.type +class SystemProviderInfo: + """Information about the VPS/Dedicated server provider""" + + provider: ServerProvider + id: str + + +def get_system_provider_info() -> SystemProviderInfo: + """Get system provider info""" + return SystemProviderInfo(provider=ServerProvider.HETZNER, id="UNKNOWN") + + +@strawberry.type +class System: + """ + Base system type which represents common system status + """ + + status: Alert = strawberry.field( + resolver=lambda: Alert( + severity=Severity.INFO, + title="Test message", + message="Test message", + timestamp=None, + ) + ) + domain_info: SystemDomainInfo = strawberry.field(resolver=get_system_domain_info) + settings: SystemSettings = SystemSettings() + info: SystemInfo = SystemInfo() + provider: SystemProviderInfo = strawberry.field(resolver=get_system_provider_info) + + @strawberry.field + def busy(self) -> bool: + """Check if the system is busy""" + return Jobs.is_busy() + + @strawberry.field + def working_directory(self) -> str: + """Get working directory""" + return os.getcwd() diff --git a/selfprivacy_api/graphql/queries/users.py b/selfprivacy_api/graphql/queries/users.py new file mode 100644 index 0000000..d2c0555 --- /dev/null +++ b/selfprivacy_api/graphql/queries/users.py @@ -0,0 +1,23 @@ +"""Users""" +# pylint: disable=too-few-public-methods +import typing +import strawberry + +from selfprivacy_api.graphql.common_types.user import ( + User, + get_user_by_username, + get_users, +) +from selfprivacy_api.graphql import IsAuthenticated + + +@strawberry.type +class Users: + @strawberry.field(permission_classes=[IsAuthenticated]) + def get_user(self, username: str) -> typing.Optional[User]: + """Get users""" + return get_user_by_username(username) + + all_users: typing.List[User] = strawberry.field( + permission_classes=[IsAuthenticated], resolver=get_users + ) diff --git a/selfprivacy_api/graphql/schema.py b/selfprivacy_api/graphql/schema.py new file mode 100644 index 0000000..dff9304 --- /dev/null +++ b/selfprivacy_api/graphql/schema.py @@ -0,0 +1,98 @@ +"""GraphQL API for SelfPrivacy.""" +# pylint: disable=too-few-public-methods + +import asyncio +from typing import AsyncGenerator +import strawberry +from selfprivacy_api.graphql import IsAuthenticated +from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations +from 
selfprivacy_api.graphql.mutations.job_mutations import JobMutations +from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn +from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations +from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations +from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations +from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations + +from selfprivacy_api.graphql.queries.api_queries import Api +from selfprivacy_api.graphql.queries.jobs import Job +from selfprivacy_api.graphql.queries.services import Services +from selfprivacy_api.graphql.queries.storage import Storage +from selfprivacy_api.graphql.queries.system import System + +from selfprivacy_api.graphql.mutations.users_mutations import UserMutations +from selfprivacy_api.graphql.queries.users import Users +from selfprivacy_api.jobs.test import test_job + + +@strawberry.type +class Query: + """Root schema for queries""" + + @strawberry.field(permission_classes=[IsAuthenticated]) + def system(self) -> System: + """System queries""" + return System() + + @strawberry.field + def api(self) -> Api: + """API access status""" + return Api() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def users(self) -> Users: + """Users queries""" + return Users() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def storage(self) -> Storage: + """Storage queries""" + return Storage() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def jobs(self) -> Job: + """Jobs queries""" + return Job() + + @strawberry.field(permission_classes=[IsAuthenticated]) + def services(self) -> Services: + """Services queries""" + return Services() + + +@strawberry.type +class Mutation( + ApiMutations, + SystemMutations, + UserMutations, + SshMutations, + StorageMutations, + ServicesMutations, + JobMutations, +): + """Root schema for mutations""" + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def test_mutation(self) -> GenericMutationReturn: + """Test mutation""" + test_job() + return GenericMutationReturn( + success=True, + message="Test mutation", + code=200, + ) + + +@strawberry.type +class Subscription: + """Root schema for subscriptions""" + + @strawberry.subscription(permission_classes=[IsAuthenticated]) + async def count(self, target: int = 100) -> AsyncGenerator[int, None]: + for i in range(target): + yield i + await asyncio.sleep(0.5) + + +schema = strawberry.Schema(query=Query, mutation=Mutation, subscription=Subscription) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py new file mode 100644 index 0000000..09ac254 --- /dev/null +++ b/selfprivacy_api/jobs/__init__.py @@ -0,0 +1,227 @@ +""" +Jobs controller. It handles the jobs that are created by the user. +This is a singleton class holding the jobs list. +Jobs can be added and removed. +A single job can be updated. 
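+Jobs are persisted to a dedicated JSON file through the user data helpers, so the list survives restarts of the API. + +A hypothetical usage sketch (Jobs.add and Jobs.update are defined below): + + job = Jobs.add(name="Example", type_id="example.task", description="A demo job") + Jobs.update(job=job, status=JobStatus.RUNNING, progress=50) +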
+A job is a dictionary with the following keys: + - uid: unique identifier of the job + - type_id: string identifier of the job type + - name: name of the job + - description: description of the job + - status: status of the job + - status_text: human-readable description of the current status + - progress: progress of the job in percent + - created_at: date of creation of the job + - updated_at: date of last update of the job + - finished_at: date of finish of the job + - error: error message if the job failed + - result: result of the job +""" +import typing +import datetime +from uuid import UUID +import json +import uuid +from enum import Enum + +from pydantic import BaseModel, Field + +from selfprivacy_api.utils import ReadUserData, UserDataFiles, WriteUserData + + +class JobStatus(Enum): + """ + Status of a job. + """ + + CREATED = "CREATED" + RUNNING = "RUNNING" + FINISHED = "FINISHED" + ERROR = "ERROR" + + +class Job(BaseModel): + """ + A single job tracked by the jobs controller. + """ + + # default_factory gives every job its own UUID instead of one shared default evaluated at import time + uid: UUID = Field(default_factory=uuid.uuid4) + type_id: str + name: str + description: str + status: JobStatus + status_text: typing.Optional[str] + progress: typing.Optional[int] + created_at: datetime.datetime + updated_at: datetime.datetime + finished_at: typing.Optional[datetime.datetime] + error: typing.Optional[str] + result: typing.Optional[str] + + +class Jobs: + """ + Singleton controller of the jobs list. + """ + + __instance = None + + @staticmethod + def get_instance(): + """ + Singleton method. + """ + if Jobs.__instance is None: + Jobs() + if Jobs.__instance is None: + raise Exception("Couldn't init Jobs singleton!") + return Jobs.__instance + return Jobs.__instance + + def __init__(self): + """ + Initialize the jobs list. + """ + if Jobs.__instance is not None: + raise Exception("This class is a singleton!") + else: + Jobs.__instance = self + + @staticmethod + def reset() -> None: + """ + Reset the jobs list. + """ + with WriteUserData(UserDataFiles.JOBS) as user_data: + user_data["jobs"] = [] + + @staticmethod + def add( + name: str, + type_id: str, + description: str, + status: JobStatus = JobStatus.CREATED, + status_text: str = "", + progress: int = 0, + ) -> Job: + """ + Add a job to the jobs list. + """ + job = Job( + name=name, + type_id=type_id, + description=description, + status=status, + status_text=status_text, + progress=progress, + created_at=datetime.datetime.now(), + updated_at=datetime.datetime.now(), + finished_at=None, + error=None, + result=None, + ) + with WriteUserData(UserDataFiles.JOBS) as user_data: + try: + if "jobs" not in user_data: + user_data["jobs"] = [] + user_data["jobs"].append(json.loads(job.json())) + except json.decoder.JSONDecodeError: + user_data["jobs"] = [json.loads(job.json())] + return job + + def remove(self, job: Job) -> None: + """ + Remove a job from the jobs list. + """ + self.remove_by_uuid(str(job.uid)) + + def remove_by_uuid(self, job_uuid: str) -> bool: + """ + Remove a job from the jobs list by its uid. + """ + with WriteUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for i, j in enumerate(user_data["jobs"]): + if j["uid"] == job_uuid: + del user_data["jobs"][i] + return True + return False + + @staticmethod + def update( + job: Job, + status: JobStatus, + status_text: typing.Optional[str] = None, + progress: typing.Optional[int] = None, + name: typing.Optional[str] = None, + description: typing.Optional[str] = None, + error: typing.Optional[str] = None, + result: typing.Optional[str] = None, + ) -> Job: + """ + Update a job in the jobs list. 
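+        name, description, status_text and progress are only changed when passed; status, error and result are always applied, and a FINISHED or ERROR status also sets finished_at.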
+ """ + if name is not None: + job.name = name + if description is not None: + job.description = description + if status_text is not None: + job.status_text = status_text + if progress is not None: + job.progress = progress + job.status = status + job.updated_at = datetime.datetime.now() + job.error = error + job.result = result + if status in (JobStatus.FINISHED, JobStatus.ERROR): + job.finished_at = datetime.datetime.now() + + with WriteUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for i, j in enumerate(user_data["jobs"]): + if j["uid"] == str(job.uid): + user_data["jobs"][i] = json.loads(job.json()) + break + + return job + + @staticmethod + def get_job(uid: str) -> typing.Optional[Job]: + """ + Get a job from the jobs list. + """ + with ReadUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for job in user_data["jobs"]: + if job["uid"] == uid: + return Job(**job) + return None + + @staticmethod + def get_jobs() -> typing.List[Job]: + """ + Get the jobs list. + """ + with ReadUserData(UserDataFiles.JOBS) as user_data: + try: + if "jobs" not in user_data: + user_data["jobs"] = [] + return [Job(**job) for job in user_data["jobs"]] + except json.decoder.JSONDecodeError: + return [] + + @staticmethod + def is_busy() -> bool: + """ + Check if there is a job running. + """ + with ReadUserData(UserDataFiles.JOBS) as user_data: + if "jobs" not in user_data: + user_data["jobs"] = [] + for job in user_data["jobs"]: + if job["status"] == JobStatus.RUNNING.value: + return True + return False diff --git a/selfprivacy_api/jobs/migrate_to_binds.py b/selfprivacy_api/jobs/migrate_to_binds.py new file mode 100644 index 0000000..346023d --- /dev/null +++ b/selfprivacy_api/jobs/migrate_to_binds.py @@ -0,0 +1,291 @@ +"""Function to perform migration of app data to binds.""" +import subprocess +import pathlib +import shutil + +from pydantic import BaseModel +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.mailserver import MailServer +from selfprivacy_api.services.nextcloud import Nextcloud +from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.utils.block_devices import BlockDevices + + +class BindMigrationConfig(BaseModel): + """Config for bind migration. + For each service provide block device name. 
+ """ + + email_block_device: str + bitwarden_block_device: str + gitea_block_device: str + nextcloud_block_device: str + pleroma_block_device: str + + +def is_bind_migrated() -> bool: + """Check if bind migration was performed.""" + with ReadUserData() as user_data: + return user_data.get("useBinds", False) + + +def activate_binds(config: BindMigrationConfig): + """Activate binds.""" + # Activate binds in userdata + with WriteUserData() as user_data: + if "email" not in user_data: + user_data["email"] = {} + user_data["email"]["location"] = config.email_block_device + if "bitwarden" not in user_data: + user_data["bitwarden"] = {} + user_data["bitwarden"]["location"] = config.bitwarden_block_device + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["location"] = config.gitea_block_device + if "nextcloud" not in user_data: + user_data["nextcloud"] = {} + user_data["nextcloud"]["location"] = config.nextcloud_block_device + if "pleroma" not in user_data: + user_data["pleroma"] = {} + user_data["pleroma"]["location"] = config.pleroma_block_device + + user_data["useBinds"] = True + + +def move_folder( + data_path: pathlib.Path, bind_path: pathlib.Path, user: str, group: str +): + """Move folder from data to bind.""" + if data_path.exists(): + shutil.move(str(data_path), str(bind_path)) + else: + return + + data_path.mkdir(mode=0o750, parents=True, exist_ok=True) + + shutil.chown(str(bind_path), user=user, group=group) + shutil.chown(str(data_path), user=user, group=group) + + subprocess.run(["mount", "--bind", str(bind_path), str(data_path)], check=True) + + subprocess.run(["chown", "-R", f"{user}:{group}", str(data_path)], check=True) + + +@huey.task() +def migrate_to_binds(config: BindMigrationConfig, job: Job): + """Migrate app data to binds.""" + + # Exit if migration is already done + if is_bind_migrated(): + Jobs.update( + job=job, + status=JobStatus.ERROR, + error="Migration already done.", + ) + return + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=0, + status_text="Checking if all volumes are available.", + ) + # Get block devices. + block_devices = BlockDevices().get_block_devices() + block_device_names = [device.name for device in block_devices] + + # Get all unique required block devices + required_block_devices = [] + for block_device_name in config.__dict__.values(): + if block_device_name not in required_block_devices: + required_block_devices.append(block_device_name) + + # Check if all block devices from config are present. + for block_device_name in required_block_devices: + if block_device_name not in block_device_names: + Jobs.update( + job=job, + status=JobStatus.ERROR, + error=f"Block device {block_device_name} not found.", + ) + return + + # Make sure all required block devices are mounted. + # sda1 is the root partition and is always mounted. + for block_device_name in required_block_devices: + if block_device_name == "sda1": + continue + block_device = BlockDevices().get_block_device(block_device_name) + if block_device is None: + Jobs.update( + job=job, + status=JobStatus.ERROR, + error=f"Block device {block_device_name} not found.", + ) + return + if f"/volumes/{block_device_name}" not in block_device.mountpoints: + Jobs.update( + job=job, + status=JobStatus.ERROR, + error=f"Block device {block_device_name} not mounted.", + ) + return + + # Make sure /volumes/sda1 exists. 
+ pathlib.Path("/volumes/sda1").mkdir(parents=True, exist_ok=True) + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=5, + status_text="Activating binds in NixOS config.", + ) + + activate_binds(config) + + # Perform migration of Nextcloud. + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=10, + status_text="Migrating Nextcloud.", + ) + + Nextcloud().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/nextcloud"), + bind_path=pathlib.Path(f"/volumes/{config.nextcloud_block_device}/nextcloud"), + user="nextcloud", + group="nextcloud", + ) + + # Start Nextcloud + Nextcloud().start() + + # Perform migration of Bitwarden + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=28, + status_text="Migrating Bitwarden.", + ) + + Bitwarden().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/bitwarden"), + bind_path=pathlib.Path(f"/volumes/{config.bitwarden_block_device}/bitwarden"), + user="vaultwarden", + group="vaultwarden", + ) + + move_folder( + data_path=pathlib.Path("/var/lib/bitwarden_rs"), + bind_path=pathlib.Path( + f"/volumes/{config.bitwarden_block_device}/bitwarden_rs" + ), + user="vaultwarden", + group="vaultwarden", + ) + + # Start Bitwarden + Bitwarden().start() + + # Perform migration of Gitea + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=46, + status_text="Migrating Gitea.", + ) + + Gitea().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/gitea"), + bind_path=pathlib.Path(f"/volumes/{config.gitea_block_device}/gitea"), + user="gitea", + group="gitea", + ) + + Gitea().start() + + # Perform migration of Mail server + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=64, + status_text="Migrating Mail server.", + ) + + MailServer().stop() + + move_folder( + data_path=pathlib.Path("/var/vmail"), + bind_path=pathlib.Path(f"/volumes/{config.email_block_device}/vmail"), + user="virtualMail", + group="virtualMail", + ) + + move_folder( + data_path=pathlib.Path("/var/sieve"), + bind_path=pathlib.Path(f"/volumes/{config.email_block_device}/sieve"), + user="virtualMail", + group="virtualMail", + ) + + MailServer().start() + + # Perform migration of Pleroma + + Jobs.update( + job=job, + status=JobStatus.RUNNING, + progress=82, + status_text="Migrating Pleroma.", + ) + + Pleroma().stop() + + move_folder( + data_path=pathlib.Path("/var/lib/pleroma"), + bind_path=pathlib.Path(f"/volumes/{config.pleroma_block_device}/pleroma"), + user="pleroma", + group="pleroma", + ) + + move_folder( + data_path=pathlib.Path("/var/lib/postgresql"), + bind_path=pathlib.Path(f"/volumes/{config.pleroma_block_device}/postgresql"), + user="postgres", + group="postgres", + ) + + Pleroma().start() + + Jobs.update( + job=job, + status=JobStatus.FINISHED, + progress=100, + status_text="Migration finished.", + result="Migration finished.", + ) + + +def start_bind_migration(config: BindMigrationConfig) -> Job: + """Start migration.""" + job = Jobs.add( + type_id="migrations.migrate_to_binds", + name="Migrate to binds", + description="Migration required to use the new disk space management.", + ) + migrate_to_binds(config, job) + return job diff --git a/selfprivacy_api/jobs/test.py b/selfprivacy_api/jobs/test.py new file mode 100644 index 0000000..9d93fb7 --- /dev/null +++ b/selfprivacy_api/jobs/test.py @@ -0,0 +1,57 @@ +import time +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.jobs import JobStatus, Jobs + + +@huey.task() +def test_job(): + job = Jobs.get_instance().add( + type_id="test", + name="Test 
job", + description="This is a test job.", + status=JobStatus.CREATED, + status_text="", + progress=0, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=5, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=10, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=15, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=20, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + status_text="Performing pre-move checks...", + progress=25, + ) + time.sleep(5) + Jobs.get_instance().update( + job=job, + status=JobStatus.FINISHED, + status_text="Job finished.", + progress=100, + ) diff --git a/selfprivacy_api/migrations/__init__.py b/selfprivacy_api/migrations/__init__.py index 4eeebab..2149e69 100644 --- a/selfprivacy_api/migrations/__init__.py +++ b/selfprivacy_api/migrations/__init__.py @@ -14,8 +14,14 @@ from selfprivacy_api.migrations.create_tokens_json import CreateTokensJson from selfprivacy_api.migrations.migrate_to_selfprivacy_channel import ( MigrateToSelfprivacyChannel, ) +from selfprivacy_api.migrations.mount_volume import MountVolume -migrations = [FixNixosConfigBranch(), CreateTokensJson(), MigrateToSelfprivacyChannel()] +migrations = [ + FixNixosConfigBranch(), + CreateTokensJson(), + MigrateToSelfprivacyChannel(), + MountVolume(), +] def run_migrations(): diff --git a/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py b/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py index 5f98f39..9bfd670 100644 --- a/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py +++ b/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py @@ -15,20 +15,16 @@ class MigrateToSelfprivacyChannel(Migration): def is_migration_needed(self): try: - print("Checking if migration is needed") output = subprocess.check_output( ["nix-channel", "--list"], start_new_session=True ) output = output.decode("utf-8") - print(output) first_line = output.split("\n", maxsplit=1)[0] - print(first_line) return first_line.startswith("nixos") and ( first_line.endswith("nixos-21.11") or first_line.endswith("nixos-21.05") ) except subprocess.CalledProcessError: return False - return False def migrate(self): # Change the channel and update them. diff --git a/selfprivacy_api/migrations/mount_volume.py b/selfprivacy_api/migrations/mount_volume.py new file mode 100644 index 0000000..27fba83 --- /dev/null +++ b/selfprivacy_api/migrations/mount_volume.py @@ -0,0 +1,51 @@ +import os +import subprocess + +from selfprivacy_api.migrations.migration import Migration +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.block_devices import BlockDevices + + +class MountVolume(Migration): + """Mount volume.""" + + def get_migration_name(self): + return "mount_volume" + + def get_migration_description(self): + return "Mount volume if it is not mounted." 
+ + def is_migration_needed(self): + try: + with ReadUserData() as userdata: + return "volumes" not in userdata + except Exception as e: + print(e) + return False + + def migrate(self): + # Get info about existing volumes + # Write info about volumes to userdata.json + try: + volumes = BlockDevices().get_block_devices() + # If there is an unmounted volume sdb, + # Write it to userdata.json + is_there_a_volume = False + for volume in volumes: + if volume.name == "sdb": + is_there_a_volume = True + break + with WriteUserData() as userdata: + userdata["volumes"] = [] + if is_there_a_volume: + userdata["volumes"].append( + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4", + } + ) + print("Done") + except Exception as e: + print(e) + print("Error mounting volume") diff --git a/selfprivacy_api/resources/api_auth/__init__.py b/selfprivacy_api/resources/api_auth/__init__.py deleted file mode 100644 index 9bd1703..0000000 --- a/selfprivacy_api/resources/api_auth/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python3 -"""API authentication module""" - -from flask import Blueprint -from flask_restful import Api - -auth = Blueprint("auth", __name__, url_prefix="/auth") -api = Api(auth) - -from . import ( - new_device, - recovery_token, - app_tokens, -) diff --git a/selfprivacy_api/resources/api_auth/app_tokens.py b/selfprivacy_api/resources/api_auth/app_tokens.py deleted file mode 100644 index 940c60a..0000000 --- a/selfprivacy_api/resources/api_auth/app_tokens.py +++ /dev/null @@ -1,118 +0,0 @@ -#!/usr/bin/env python3 -"""App tokens management module""" -from flask import request -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.api_auth import api -from selfprivacy_api.utils.auth import ( - delete_token, - get_tokens_info, - is_token_name_exists, - is_token_name_pair_valid, - refresh_token, - get_token_name, -) - - -class Tokens(Resource): - """Token management class - GET returns the list of active devices. - DELETE invalidates token unless it is the last one or the caller uses this token. - POST refreshes the token of the caller. 
- """ - - def get(self): - """ - Get current device tokens - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: List of tokens - 400: - description: Bad request - """ - caller_name = get_token_name(request.headers.get("Authorization").split(" ")[1]) - tokens = get_tokens_info() - # Retrun a list of tokens and if it is the caller's token - # it will be marked with a flag - return [ - { - "name": token["name"], - "date": token["date"], - "is_caller": token["name"] == caller_name, - } - for token in tokens - ] - - def delete(self): - """ - Delete token - --- - tags: - - Tokens - security: - - bearerAuth: [] - parameters: - - in: body - name: token - required: true - description: Token's name to delete - schema: - type: object - properties: - token_name: - type: string - description: Token name to delete - required: true - responses: - 200: - description: Token deleted - 400: - description: Bad request - 404: - description: Token not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "token_name", type=str, required=True, help="Token to delete" - ) - args = parser.parse_args() - token_name = args["token_name"] - if is_token_name_pair_valid( - token_name, request.headers.get("Authorization").split(" ")[1] - ): - return {"message": "Cannot delete caller's token"}, 400 - if not is_token_name_exists(token_name): - return {"message": "Token not found"}, 404 - delete_token(token_name) - return {"message": "Token deleted"}, 200 - - def post(self): - """ - Refresh token - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: Token refreshed - 400: - description: Bad request - 404: - description: Token not found - """ - # Get token from header - token = request.headers.get("Authorization").split(" ")[1] - new_token = refresh_token(token) - if new_token is None: - return {"message": "Token not found"}, 404 - return {"token": new_token}, 200 - - -api.add_resource(Tokens, "/tokens") diff --git a/selfprivacy_api/resources/api_auth/new_device.py b/selfprivacy_api/resources/api_auth/new_device.py deleted file mode 100644 index 2c0bde1..0000000 --- a/selfprivacy_api/resources/api_auth/new_device.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python3 -"""New device auth module""" -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.api_auth import api -from selfprivacy_api.utils.auth import ( - get_new_device_auth_token, - use_new_device_auth_token, - delete_new_device_auth_token, -) - - -class NewDevice(Resource): - """New device auth class - POST returns a new token for the caller. - """ - - def post(self): - """ - Get new device token - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: New device token - 400: - description: Bad request - """ - token = get_new_device_auth_token() - return {"token": token} - - def delete(self): - """ - Delete new device token - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: New device token deleted - 400: - description: Bad request - """ - delete_new_device_auth_token() - return {"token": None} - - -class AuthorizeDevice(Resource): - """Authorize device class - POST authorizes the caller. 
- """ - - def post(self): - """ - Authorize device - --- - tags: - - Tokens - parameters: - - in: body - name: data - required: true - description: Who is authorizing - schema: - type: object - properties: - token: - type: string - description: Mnemonic token to authorize - device: - type: string - description: Device to authorize - responses: - 200: - description: Device authorized - 400: - description: Bad request - 404: - description: Token not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "token", type=str, required=True, help="Mnemonic token to authorize" - ) - parser.add_argument( - "device", type=str, required=True, help="Device to authorize" - ) - args = parser.parse_args() - auth_token = args["token"] - device = args["device"] - token = use_new_device_auth_token(auth_token, device) - if token is None: - return {"message": "Token not found"}, 404 - return {"message": "Device authorized", "token": token}, 200 - - -api.add_resource(NewDevice, "/new_device") -api.add_resource(AuthorizeDevice, "/new_device/authorize") diff --git a/selfprivacy_api/resources/api_auth/recovery_token.py b/selfprivacy_api/resources/api_auth/recovery_token.py deleted file mode 100644 index fbd80d9..0000000 --- a/selfprivacy_api/resources/api_auth/recovery_token.py +++ /dev/null @@ -1,196 +0,0 @@ -#!/usr/bin/env python3 -"""Recovery token module""" -from datetime import datetime -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.api_auth import api -from selfprivacy_api.utils.auth import ( - is_recovery_token_exists, - is_recovery_token_valid, - get_recovery_token_status, - generate_recovery_token, - use_mnemonic_recoverery_token, -) - - -class RecoveryToken(Resource): - """Recovery token class - GET returns the status of the recovery token. - POST generates a new recovery token. 
- """ - - def get(self): - """ - Get recovery token status - --- - tags: - - Tokens - security: - - bearerAuth: [] - responses: - 200: - description: Recovery token status - schema: - type: object - properties: - exists: - type: boolean - description: Recovery token exists - valid: - type: boolean - description: Recovery token is valid - date: - type: string - description: Recovery token date - expiration: - type: string - description: Recovery token expiration date - uses_left: - type: integer - description: Recovery token uses left - 400: - description: Bad request - """ - if not is_recovery_token_exists(): - return { - "exists": False, - "valid": False, - "date": None, - "expiration": None, - "uses_left": None, - } - status = get_recovery_token_status() - if not is_recovery_token_valid(): - return { - "exists": True, - "valid": False, - "date": status["date"], - "expiration": status["expiration"], - "uses_left": status["uses_left"], - } - return { - "exists": True, - "valid": True, - "date": status["date"], - "expiration": status["expiration"], - "uses_left": status["uses_left"], - } - - def post(self): - """ - Generate recovery token - --- - tags: - - Tokens - security: - - bearerAuth: [] - parameters: - - in: body - name: data - required: true - description: Token data - schema: - type: object - properties: - expiration: - type: string - description: Token expiration date - uses: - type: integer - description: Token uses - responses: - 200: - description: Recovery token generated - schema: - type: object - properties: - token: - type: string - description: Mnemonic recovery token - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument( - "expiration", type=str, required=False, help="Token expiration date" - ) - parser.add_argument("uses", type=int, required=False, help="Token uses") - args = parser.parse_args() - # Convert expiration date to datetime and return 400 if it is not valid - if args["expiration"]: - try: - expiration = datetime.strptime( - args["expiration"], "%Y-%m-%dT%H:%M:%S.%fZ" - ) - # Retrun 400 if expiration date is in the past - if expiration < datetime.now(): - return {"message": "Expiration date cannot be in the past"}, 400 - except ValueError: - return { - "error": "Invalid expiration date. Use YYYY-MM-DDTHH:MM:SS.SSSZ" - }, 400 - else: - expiration = None - if args["uses"] != None and args["uses"] < 1: - return {"message": "Uses must be greater than 0"}, 400 - # Generate recovery token - token = generate_recovery_token(expiration, args["uses"]) - return {"token": token} - - -class UseRecoveryToken(Resource): - """Use recovery token class - POST uses the recovery token. 
- """ - - def post(self): - """ - Use recovery token - --- - tags: - - Tokens - parameters: - - in: body - name: data - required: true - description: Token data - schema: - type: object - properties: - token: - type: string - description: Mnemonic recovery token - device: - type: string - description: Device to authorize - responses: - 200: - description: Recovery token used - schema: - type: object - properties: - token: - type: string - description: Device authorization token - 400: - description: Bad request - 404: - description: Token not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "token", type=str, required=True, help="Mnemonic recovery token" - ) - parser.add_argument( - "device", type=str, required=True, help="Device to authorize" - ) - args = parser.parse_args() - # Use recovery token - token = use_mnemonic_recoverery_token(args["token"], args["device"]) - if token is None: - return {"error": "Token not found"}, 404 - return {"token": token} - - -api.add_resource(RecoveryToken, "/recovery_token") -api.add_resource(UseRecoveryToken, "/recovery_token/use") diff --git a/selfprivacy_api/resources/common.py b/selfprivacy_api/resources/common.py deleted file mode 100644 index ce0dedf..0000000 --- a/selfprivacy_api/resources/common.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python3 -"""Unassigned views""" -from flask_restful import Resource - - -class ApiVersion(Resource): - """SelfPrivacy API version""" - - def get(self): - """Get API version - --- - tags: - - System - responses: - 200: - description: API version - schema: - type: object - properties: - version: - type: string - description: API version - 401: - description: Unauthorized - """ - return {"version": "1.2.7"} diff --git a/selfprivacy_api/resources/services/__init__.py b/selfprivacy_api/resources/services/__init__.py deleted file mode 100644 index a7f1dbe..0000000 --- a/selfprivacy_api/resources/services/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python3 -"""Services management module""" -from flask import Blueprint -from flask_restful import Api - -services = Blueprint("services", __name__, url_prefix="/services") -api = Api(services) - -from . 
import ( - bitwarden, - gitea, - mailserver, - main, - nextcloud, - ocserv, - pleroma, - restic, - ssh, -) diff --git a/selfprivacy_api/resources/services/bitwarden.py b/selfprivacy_api/resources/services/bitwarden.py deleted file mode 100644 index 412ba8a..0000000 --- a/selfprivacy_api/resources/services/bitwarden.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Bitwarden management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableBitwarden(Resource): - """Enable Bitwarden""" - - def post(self): - """ - Enable Bitwarden - --- - tags: - - Bitwarden - security: - - bearerAuth: [] - responses: - 200: - description: Bitwarden enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "bitwarden" not in data: - data["bitwarden"] = {} - data["bitwarden"]["enable"] = True - - return { - "status": 0, - "message": "Bitwarden enabled", - } - - -class DisableBitwarden(Resource): - """Disable Bitwarden""" - - def post(self): - """ - Disable Bitwarden - --- - tags: - - Bitwarden - security: - - bearerAuth: [] - responses: - 200: - description: Bitwarden disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "bitwarden" not in data: - data["bitwarden"] = {} - data["bitwarden"]["enable"] = False - - return { - "status": 0, - "message": "Bitwarden disabled", - } - - -api.add_resource(EnableBitwarden, "/bitwarden/enable") -api.add_resource(DisableBitwarden, "/bitwarden/disable") diff --git a/selfprivacy_api/resources/services/gitea.py b/selfprivacy_api/resources/services/gitea.py deleted file mode 100644 index bd4b8de..0000000 --- a/selfprivacy_api/resources/services/gitea.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Gitea management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableGitea(Resource): - """Enable Gitea""" - - def post(self): - """ - Enable Gitea - --- - tags: - - Gitea - security: - - bearerAuth: [] - responses: - 200: - description: Gitea enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "gitea" not in data: - data["gitea"] = {} - data["gitea"]["enable"] = True - - return { - "status": 0, - "message": "Gitea enabled", - } - - -class DisableGitea(Resource): - """Disable Gitea""" - - def post(self): - """ - Disable Gitea - --- - tags: - - Gitea - security: - - bearerAuth: [] - responses: - 200: - description: Gitea disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "gitea" not in data: - data["gitea"] = {} - data["gitea"]["enable"] = False - - return { - "status": 0, - "message": "Gitea disabled", - } - - -api.add_resource(EnableGitea, "/gitea/enable") -api.add_resource(DisableGitea, "/gitea/disable") diff --git a/selfprivacy_api/resources/services/mailserver.py b/selfprivacy_api/resources/services/mailserver.py deleted file mode 100644 index 1185d20..0000000 --- a/selfprivacy_api/resources/services/mailserver.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python3 -"""Mail server management module""" -import base64 -import subprocess -import os -from flask_restful import Resource - -from selfprivacy_api.resources.services import api - -from selfprivacy_api.utils import get_domain - - -class DKIMKey(Resource): - """Get DKIM key from file""" - - def get(self): - """ - Get DKIM key from file - --- - tags: 
- - Email - security: - - bearerAuth: [] - responses: - 200: - description: DKIM key encoded in base64 - 401: - description: Unauthorized - 404: - description: DKIM key not found - """ - domain = get_domain() - - if os.path.exists("/var/dkim/" + domain + ".selector.txt"): - cat_process = subprocess.Popen( - ["cat", "/var/dkim/" + domain + ".selector.txt"], stdout=subprocess.PIPE - ) - dkim = cat_process.communicate()[0] - dkim = base64.b64encode(dkim) - dkim = str(dkim, "utf-8") - return dkim - return "DKIM file not found", 404 - - -api.add_resource(DKIMKey, "/mailserver/dkim") diff --git a/selfprivacy_api/resources/services/main.py b/selfprivacy_api/resources/services/main.py deleted file mode 100644 index 8b6743c..0000000 --- a/selfprivacy_api/resources/services/main.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python3 -"""Services status module""" -import subprocess -from flask_restful import Resource - -from . import api - - -class ServiceStatus(Resource): - """Get service status""" - - def get(self): - """ - Get service status - --- - tags: - - Services - responses: - 200: - description: Service status - schema: - type: object - properties: - imap: - type: integer - description: Dovecot service status - smtp: - type: integer - description: Postfix service status - http: - type: integer - description: Nginx service status - bitwarden: - type: integer - description: Bitwarden service status - gitea: - type: integer - description: Gitea service status - nextcloud: - type: integer - description: Nextcloud service status - ocserv: - type: integer - description: OpenConnect VPN service status - pleroma: - type: integer - description: Pleroma service status - 401: - description: Unauthorized - """ - imap_service = subprocess.Popen(["systemctl", "status", "dovecot2.service"]) - imap_service.communicate()[0] - smtp_service = subprocess.Popen(["systemctl", "status", "postfix.service"]) - smtp_service.communicate()[0] - http_service = subprocess.Popen(["systemctl", "status", "nginx.service"]) - http_service.communicate()[0] - bitwarden_service = subprocess.Popen( - ["systemctl", "status", "vaultwarden.service"] - ) - bitwarden_service.communicate()[0] - gitea_service = subprocess.Popen(["systemctl", "status", "gitea.service"]) - gitea_service.communicate()[0] - nextcloud_service = subprocess.Popen( - ["systemctl", "status", "phpfpm-nextcloud.service"] - ) - nextcloud_service.communicate()[0] - ocserv_service = subprocess.Popen(["systemctl", "status", "ocserv.service"]) - ocserv_service.communicate()[0] - pleroma_service = subprocess.Popen(["systemctl", "status", "pleroma.service"]) - pleroma_service.communicate()[0] - - return { - "imap": imap_service.returncode, - "smtp": smtp_service.returncode, - "http": http_service.returncode, - "bitwarden": bitwarden_service.returncode, - "gitea": gitea_service.returncode, - "nextcloud": nextcloud_service.returncode, - "ocserv": ocserv_service.returncode, - "pleroma": pleroma_service.returncode, - } - - -api.add_resource(ServiceStatus, "/status") diff --git a/selfprivacy_api/resources/services/nextcloud.py b/selfprivacy_api/resources/services/nextcloud.py deleted file mode 100644 index 3aa9d06..0000000 --- a/selfprivacy_api/resources/services/nextcloud.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Nextcloud management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableNextcloud(Resource): - """Enable Nextcloud""" - - def 
post(self): - """ - Enable Nextcloud - --- - tags: - - Nextcloud - security: - - bearerAuth: [] - responses: - 200: - description: Nextcloud enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "nextcloud" not in data: - data["nextcloud"] = {} - data["nextcloud"]["enable"] = True - - return { - "status": 0, - "message": "Nextcloud enabled", - } - - -class DisableNextcloud(Resource): - """Disable Nextcloud""" - - def post(self): - """ - Disable Nextcloud - --- - tags: - - Nextcloud - security: - - bearerAuth: [] - responses: - 200: - description: Nextcloud disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "nextcloud" not in data: - data["nextcloud"] = {} - data["nextcloud"]["enable"] = False - - return { - "status": 0, - "message": "Nextcloud disabled", - } - - -api.add_resource(EnableNextcloud, "/nextcloud/enable") -api.add_resource(DisableNextcloud, "/nextcloud/disable") diff --git a/selfprivacy_api/resources/services/ocserv.py b/selfprivacy_api/resources/services/ocserv.py deleted file mode 100644 index 4dc83da..0000000 --- a/selfprivacy_api/resources/services/ocserv.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""OpenConnect VPN server management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnableOcserv(Resource): - """Enable OpenConnect VPN server""" - - def post(self): - """ - Enable OCserv - --- - tags: - - OCserv - security: - - bearerAuth: [] - responses: - 200: - description: OCserv enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "ocserv" not in data: - data["ocserv"] = {} - data["ocserv"]["enable"] = True - - return { - "status": 0, - "message": "OpenConnect VPN server enabled", - } - - -class DisableOcserv(Resource): - """Disable OpenConnect VPN server""" - - def post(self): - """ - Disable OCserv - --- - tags: - - OCserv - security: - - bearerAuth: [] - responses: - 200: - description: OCserv disabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "ocserv" not in data: - data["ocserv"] = {} - data["ocserv"]["enable"] = False - - return { - "status": 0, - "message": "OpenConnect VPN server disabled", - } - - -api.add_resource(EnableOcserv, "/ocserv/enable") -api.add_resource(DisableOcserv, "/ocserv/disable") diff --git a/selfprivacy_api/resources/services/pleroma.py b/selfprivacy_api/resources/services/pleroma.py deleted file mode 100644 index aaf08f0..0000000 --- a/selfprivacy_api/resources/services/pleroma.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python3 -"""Pleroma management module""" -from flask_restful import Resource - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData - - -class EnablePleroma(Resource): - """Enable Pleroma""" - - def post(self): - """ - Enable Pleroma - --- - tags: - - Pleroma - security: - - bearerAuth: [] - responses: - 200: - description: Pleroma enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "pleroma" not in data: - data["pleroma"] = {} - data["pleroma"]["enable"] = True - - return { - "status": 0, - "message": "Pleroma enabled", - } - - -class DisablePleroma(Resource): - """Disable Pleroma""" - - def post(self): - """ - Disable Pleroma - --- - tags: - - Pleroma - security: - - bearerAuth: [] - responses: - 200: - description: Pleroma disabled - 401: - description: Unauthorized - """ - with 
WriteUserData() as data: - if "pleroma" not in data: - data["pleroma"] = {} - data["pleroma"]["enable"] = False - - return { - "status": 0, - "message": "Pleroma disabled", - } - - -api.add_resource(EnablePleroma, "/pleroma/enable") -api.add_resource(DisablePleroma, "/pleroma/disable") diff --git a/selfprivacy_api/resources/services/restic.py b/selfprivacy_api/resources/services/restic.py deleted file mode 100644 index dd22c9a..0000000 --- a/selfprivacy_api/resources/services/restic.py +++ /dev/null @@ -1,241 +0,0 @@ -#!/usr/bin/env python3 -"""Backups management module""" -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData -from selfprivacy_api.restic_controller import tasks as restic_tasks -from selfprivacy_api.restic_controller import ResticController, ResticStates - - -class ListAllBackups(Resource): - """List all restic backups""" - - def get(self): - """ - Get all restic backups - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: A list of snapshots - 400: - description: Bad request - 401: - description: Unauthorized - """ - - restic = ResticController() - return restic.snapshot_list - - -class AsyncCreateBackup(Resource): - """Create a new restic backup""" - - def put(self): - """ - Initiate a new restic backup - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: Backup creation has started - 400: - description: Bad request - 401: - description: Unauthorized - 409: - description: Backup already in progress - """ - restic = ResticController() - if restic.state is ResticStates.NO_KEY: - return {"error": "No key provided"}, 400 - if restic.state is ResticStates.INITIALIZING: - return {"error": "Backup is initializing"}, 400 - if restic.state is ResticStates.BACKING_UP: - return {"error": "Backup is already running"}, 409 - restic_tasks.start_backup() - return { - "status": 0, - "message": "Backup creation has started", - } - - -class CheckBackupStatus(Resource): - """Check current backup status""" - - def get(self): - """ - Get backup status - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: Backup status - 400: - description: Bad request - 401: - description: Unauthorized - """ - restic = ResticController() - - return { - "status": restic.state.name, - "progress": restic.progress, - "error_message": restic.error_message, - } - - -class ForceReloadSnapshots(Resource): - """Force reload snapshots""" - - def get(self): - """ - Force reload snapshots - --- - tags: - - Backups - security: - - bearerAuth: [] - responses: - 200: - description: Snapshots reloaded - 400: - description: Bad request - 401: - description: Unauthorized - """ - restic_tasks.load_snapshots() - return { - "status": 0, - "message": "Snapshots reload started", - } - - -class AsyncRestoreBackup(Resource): - """Trigger backup restoration process""" - - def put(self): - """ - Start backup restoration - --- - tags: - - Backups - security: - - bearerAuth: [] - parameters: - - in: body - required: true - name: backup - description: Backup to restore - schema: - type: object - required: - - backupId - properties: - backupId: - type: string - responses: - 200: - description: Backup restoration process started - 400: - description: Bad request - 401: - description: Unauthorized - """ - parser = reqparse.RequestParser() - parser.add_argument("backupId", type=str, required=True) - args = parser.parse_args() - - 
restic = ResticController() - if restic.state is ResticStates.NO_KEY: - return {"error": "No key provided"}, 400 - if restic.state is ResticStates.NOT_INITIALIZED: - return {"error": "Repository is not initialized"}, 400 - if restic.state is ResticStates.BACKING_UP: - return {"error": "Backup is already running"}, 409 - if restic.state is ResticStates.INITIALIZING: - return {"error": "Repository is initializing"}, 400 - if restic.state is ResticStates.RESTORING: - return {"error": "Restore is already running"}, 409 - for backup in restic.snapshot_list: - if backup["short_id"] == args["backupId"]: - restic_tasks.restore_from_backup(args["backupId"]) - return { - "status": 0, - "message": "Backup restoration procedure started", - } - - return {"error": "Backup not found"}, 404 - - -class BackblazeConfig(Resource): - """Backblaze config""" - - def put(self): - """ - Set the new key for backblaze - --- - tags: - - Backups - security: - - bearerAuth: [] - parameters: - - in: body - required: true - name: backblazeSettings - description: New Backblaze settings - schema: - type: object - required: - - accountId - - accountKey - - bucket - properties: - accountId: - type: string - accountKey: - type: string - bucket: - type: string - responses: - 200: - description: New Backblaze settings - 400: - description: Bad request - 401: - description: Unauthorized - """ - parser = reqparse.RequestParser() - parser.add_argument("accountId", type=str, required=True) - parser.add_argument("accountKey", type=str, required=True) - parser.add_argument("bucket", type=str, required=True) - args = parser.parse_args() - - with WriteUserData() as data: - if "backblaze" not in data: - data["backblaze"] = {} - data["backblaze"]["accountId"] = args["accountId"] - data["backblaze"]["accountKey"] = args["accountKey"] - data["backblaze"]["bucket"] = args["bucket"] - - restic_tasks.update_keys_from_userdata() - - return "New Backblaze settings saved" - - -api.add_resource(ListAllBackups, "/restic/backup/list") -api.add_resource(AsyncCreateBackup, "/restic/backup/create") -api.add_resource(CheckBackupStatus, "/restic/backup/status") -api.add_resource(AsyncRestoreBackup, "/restic/backup/restore") -api.add_resource(BackblazeConfig, "/restic/backblaze/config") -api.add_resource(ForceReloadSnapshots, "/restic/backup/reload") diff --git a/selfprivacy_api/resources/services/ssh.py b/selfprivacy_api/resources/services/ssh.py deleted file mode 100644 index 3ea5a1d..0000000 --- a/selfprivacy_api/resources/services/ssh.py +++ /dev/null @@ -1,407 +0,0 @@ -#!/usr/bin/env python3 -"""SSH management module""" -from flask_restful import Resource, reqparse - -from selfprivacy_api.resources.services import api -from selfprivacy_api.utils import WriteUserData, ReadUserData, validate_ssh_public_key - - -class EnableSSH(Resource): - """Enable SSH""" - - def post(self): - """ - Enable SSH - --- - tags: - - SSH - security: - - bearerAuth: [] - responses: - 200: - description: SSH enabled - 401: - description: Unauthorized - """ - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - data["ssh"]["enable"] = True - - return { - "status": 0, - "message": "SSH enabled", - } - - -class SSHSettings(Resource): - """Enable/disable SSH""" - - def get(self): - """ - Get current SSH settings - --- - tags: - - SSH - security: - - bearerAuth: [] - responses: - 200: - description: SSH settings - 400: - description: Bad request - """ - with ReadUserData() as data: - if "ssh" not in data: - return {"enable": True, "passwordAuthentication": 
True} - if "enable" not in data["ssh"]: - data["ssh"]["enable"] = True - if "passwordAuthentication" not in data["ssh"]: - data["ssh"]["passwordAuthentication"] = True - return { - "enable": data["ssh"]["enable"], - "passwordAuthentication": data["ssh"]["passwordAuthentication"], - } - - def put(self): - """ - Change SSH settings - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - name: sshSettings - in: body - required: true - description: SSH settings - schema: - type: object - required: - - enable - - passwordAuthentication - properties: - enable: - type: boolean - passwordAuthentication: - type: boolean - responses: - 200: - description: New settings saved - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument("enable", type=bool, required=False) - parser.add_argument("passwordAuthentication", type=bool, required=False) - args = parser.parse_args() - enable = args["enable"] - password_authentication = args["passwordAuthentication"] - - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - if enable is not None: - data["ssh"]["enable"] = enable - if password_authentication is not None: - data["ssh"]["passwordAuthentication"] = password_authentication - - return "SSH settings changed" - - -class WriteSSHKey(Resource): - """Write new SSH key""" - - def put(self): - """ - Add a SSH root key - --- - consumes: - - application/json - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: body - name: body - required: true - description: Public key to add - schema: - type: object - required: - - public_key - properties: - public_key: - type: string - description: ssh-ed25519 public key. - responses: - 201: - description: Key added - 400: - description: Bad request - 401: - description: Unauthorized - 409: - description: Key already exists - """ - parser = reqparse.RequestParser() - parser.add_argument( - "public_key", type=str, required=True, help="Key cannot be blank!" - ) - args = parser.parse_args() - - public_key = args["public_key"] - - if not validate_ssh_public_key(public_key): - return { - "error": "Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported.", - }, 400 - - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - # Return 409 if key already in array - for key in data["ssh"]["rootKeys"]: - if key == public_key: - return { - "error": "Key already exists", - }, 409 - data["ssh"]["rootKeys"].append(public_key) - - return { - "status": 0, - "message": "New SSH key successfully written", - }, 201 - - -class SSHKeys(Resource): - """List SSH keys""" - - def get(self, username): - """ - List SSH keys - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: path - name: username - type: string - required: true - description: User to list keys for - responses: - 200: - description: SSH keys - 401: - description: Unauthorized - """ - with ReadUserData() as data: - if username == "root": - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - return data["ssh"]["rootKeys"] - if username == data["username"]: - if "sshKeys" not in data: - data["sshKeys"] = [] - return data["sshKeys"] - if "users" not in data: - data["users"] = [] - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - return user["sshKeys"] - return { - "error": "User not found", - }, 404 - - def post(self, username): - """ - Add SSH key to the user - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: body - required: true - name: public_key - schema: - type: object - required: - - public_key - properties: - public_key: - type: string - - in: path - name: username - type: string - required: true - description: User to add keys for - responses: - 201: - description: SSH key added - 401: - description: Unauthorized - 404: - description: User not found - 409: - description: Key already exists - """ - parser = reqparse.RequestParser() - parser.add_argument( - "public_key", type=str, required=True, help="Key cannot be blank!" - ) - args = parser.parse_args() - - if username == "root": - return { - "error": "Use /ssh/key/send to add root keys", - }, 400 - - if not validate_ssh_public_key(args["public_key"]): - return { - "error": "Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported.", - }, 400 - - with WriteUserData() as data: - if username == data["username"]: - if "sshKeys" not in data: - data["sshKeys"] = [] - # Return 409 if key already in array - for key in data["sshKeys"]: - if key == args["public_key"]: - return { - "error": "Key already exists", - }, 409 - data["sshKeys"].append(args["public_key"]) - return { - "message": "New SSH key successfully written", - }, 201 - - if "users" not in data: - data["users"] = [] - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - # Return 409 if key already in array - for key in user["sshKeys"]: - if key == args["public_key"]: - return { - "error": "Key already exists", - }, 409 - user["sshKeys"].append(args["public_key"]) - return { - "message": "New SSH key successfully written", - }, 201 - return { - "error": "User not found", - }, 404 - - def delete(self, username): - """ - Delete SSH key - --- - tags: - - SSH - security: - - bearerAuth: [] - parameters: - - in: body - name: public_key - required: true - description: Key to delete - schema: - type: object - required: - - public_key - properties: - public_key: - type: string - - in: path - name: username - type: string - required: true - description: User to delete keys for - responses: - 200: - description: SSH key deleted - 401: - description: Unauthorized - 404: - description: Key not found - """ - parser = reqparse.RequestParser() - parser.add_argument( - "public_key", type=str, required=True, help="Key cannot be blank!" - ) - args = parser.parse_args() - - with WriteUserData() as data: - if username == "root": - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - # Return 404 if key not in array - for key in data["ssh"]["rootKeys"]: - if key == args["public_key"]: - data["ssh"]["rootKeys"].remove(key) - # If rootKeys became zero length, delete it - if len(data["ssh"]["rootKeys"]) == 0: - del data["ssh"]["rootKeys"] - return { - "message": "SSH key deleted", - }, 200 - return { - "error": "Key not found", - }, 404 - if username == data["username"]: - if "sshKeys" not in data: - data["sshKeys"] = [] - # Return 404 if key not in array - for key in data["sshKeys"]: - if key == args["public_key"]: - data["sshKeys"].remove(key) - return { - "message": "SSH key deleted", - }, 200 - return { - "error": "Key not found", - }, 404 - if "users" not in data: - data["users"] = [] - for user in data["users"]: - if user["username"] == username: - if "sshKeys" not in user: - user["sshKeys"] = [] - # Return 404 if key not in array - for key in user["sshKeys"]: - if key == args["public_key"]: - user["sshKeys"].remove(key) - return { - "message": "SSH key successfully deleted", - }, 200 - return { - "error": "Key not found", - }, 404 - return { - "error": "User not found", - }, 404 - - -api.add_resource(EnableSSH, "/ssh/enable") -api.add_resource(SSHSettings, "/ssh") - -api.add_resource(WriteSSHKey, "/ssh/key/send") -api.add_resource(SSHKeys, "/ssh/keys/") diff --git a/selfprivacy_api/resources/system.py b/selfprivacy_api/resources/system.py deleted file mode 100644 index db988da..0000000 --- a/selfprivacy_api/resources/system.py +++ /dev/null @@ -1,344 +0,0 @@ -#!/usr/bin/env python3 -"""System management module""" -import os -import subprocess -import pytz -from flask import Blueprint -from flask_restful import Resource, Api, reqparse - -from selfprivacy_api.utils import WriteUserData, ReadUserData - -api_system = 
Blueprint("system", __name__, url_prefix="/system") -api = Api(api_system) - - -class Timezone(Resource): - """Change timezone of NixOS""" - - def get(self): - """ - Get current system timezone - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: Timezone - 400: - description: Bad request - """ - with ReadUserData() as data: - if "timezone" not in data: - return "Europe/Uzhgorod" - return data["timezone"] - - def put(self): - """ - Change system timezone - --- - tags: - - System - security: - - bearerAuth: [] - parameters: - - name: timezone - in: body - required: true - description: Timezone to set - schema: - type: object - required: - - timezone - properties: - timezone: - type: string - responses: - 200: - description: Timezone changed - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument("timezone", type=str, required=True) - timezone = parser.parse_args()["timezone"] - - # Check if timezone is a valid tzdata string - if timezone not in pytz.all_timezones: - return {"error": "Invalid timezone"}, 400 - - with WriteUserData() as data: - data["timezone"] = timezone - return "Timezone changed" - - -class AutoUpgrade(Resource): - """Enable/disable automatic upgrades and reboots""" - - def get(self): - """ - Get current system autoupgrade settings - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: Auto-upgrade settings - 400: - description: Bad request - """ - with ReadUserData() as data: - if "autoUpgrade" not in data: - return {"enable": True, "allowReboot": False} - if "enable" not in data["autoUpgrade"]: - data["autoUpgrade"]["enable"] = True - if "allowReboot" not in data["autoUpgrade"]: - data["autoUpgrade"]["allowReboot"] = False - return data["autoUpgrade"] - - def put(self): - """ - Change system auto upgrade settings - --- - tags: - - System - security: - - bearerAuth: [] - parameters: - - name: autoUpgrade - in: body - required: true - description: Auto upgrade settings - schema: - type: object - required: - - enable - - allowReboot - properties: - enable: - type: boolean - allowReboot: - type: boolean - responses: - 200: - description: New settings saved - 400: - description: Bad request - """ - parser = reqparse.RequestParser() - parser.add_argument("enable", type=bool, required=False) - parser.add_argument("allowReboot", type=bool, required=False) - args = parser.parse_args() - enable = args["enable"] - allow_reboot = args["allowReboot"] - - with WriteUserData() as data: - if "autoUpgrade" not in data: - data["autoUpgrade"] = {} - if enable is not None: - data["autoUpgrade"]["enable"] = enable - if allow_reboot is not None: - data["autoUpgrade"]["allowReboot"] = allow_reboot - return "Auto-upgrade settings changed" - - -class RebuildSystem(Resource): - """Rebuild NixOS""" - - def get(self): - """ - Rebuild NixOS with nixos-rebuild switch - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System rebuild has started - 401: - description: Unauthorized - """ - rebuild_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True - ) - rebuild_result.communicate()[0] - return rebuild_result.returncode - - -class RollbackSystem(Resource): - """Rollback NixOS""" - - def get(self): - """ - Rollback NixOS with nixos-rebuild switch --rollback - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System rollback has started - 401: - description: 
Unauthorized - """ - rollback_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True - ) - rollback_result.communicate()[0] - return rollback_result.returncode - - -class UpgradeSystem(Resource): - """Upgrade NixOS""" - - def get(self): - """ - Upgrade NixOS with nixos-rebuild switch --upgrade - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System upgrade has started - 401: - description: Unauthorized - """ - upgrade_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True - ) - upgrade_result.communicate()[0] - return upgrade_result.returncode - - -class RebootSystem(Resource): - """Reboot the system""" - - def get(self): - """ - Reboot the system - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: System reboot has started - 401: - description: Unauthorized - """ - subprocess.Popen(["reboot"], start_new_session=True) - return "System reboot has started" - - -class SystemVersion(Resource): - """Get system version from uname""" - - def get(self): - """ - Get system version from uname -a - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: OK - 401: - description: Unauthorized - """ - return { - "system_version": subprocess.check_output(["uname", "-a"]) - .decode("utf-8") - .strip() - } - - -class PythonVersion(Resource): - """Get python version""" - - def get(self): - """ - Get python version used by this API - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: OK - 401: - description: Unauthorized - """ - return subprocess.check_output(["python", "-V"]).decode("utf-8").strip() - - -class PullRepositoryChanges(Resource): - """Pull NixOS config repository changes""" - - def get(self): - """ - Pull Repository Changes - --- - tags: - - System - security: - - bearerAuth: [] - responses: - 200: - description: Got update - 201: - description: Nothing to update - 401: - description: Unauthorized - 500: - description: Something went wrong - """ - - git_pull_command = ["git", "pull"] - - current_working_directory = os.getcwd() - os.chdir("/etc/nixos") - - git_pull_process_descriptor = subprocess.Popen( - git_pull_command, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - shell=False, - ) - - data = git_pull_process_descriptor.communicate()[0].decode("utf-8") - - os.chdir(current_working_directory) - - if git_pull_process_descriptor.returncode == 0: - return { - "status": 0, - "message": "Update completed successfully", - "data": data, - } - return { - "status": git_pull_process_descriptor.returncode, - "message": "Something went wrong", - "data": data, - }, 500 - - -api.add_resource(Timezone, "/configuration/timezone") -api.add_resource(AutoUpgrade, "/configuration/autoUpgrade") -api.add_resource(RebuildSystem, "/configuration/apply") -api.add_resource(RollbackSystem, "/configuration/rollback") -api.add_resource(UpgradeSystem, "/configuration/upgrade") -api.add_resource(RebootSystem, "/reboot") -api.add_resource(SystemVersion, "/version") -api.add_resource(PythonVersion, "/pythonVersion") -api.add_resource(PullRepositoryChanges, "/configuration/pull") diff --git a/selfprivacy_api/resources/users.py b/selfprivacy_api/resources/users.py deleted file mode 100644 index e114324..0000000 --- a/selfprivacy_api/resources/users.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/env python3 -"""Users management module""" -import subprocess -import re -from 
flask_restful import Resource, reqparse - -from selfprivacy_api.utils import WriteUserData, ReadUserData, is_username_forbidden - - -class Users(Resource): - """Users management""" - - def get(self): - """ - Get a list of users - --- - tags: - - Users - security: - - bearerAuth: [] - responses: - 200: - description: A list of users - 401: - description: Unauthorized - """ - parser = reqparse.RequestParser(bundle_errors=True) - parser.add_argument("withMainUser", type=bool, required=False) - args = parser.parse_args() - with_main_user = False if args["withMainUser"] is None else args["withMainUser"] - - with ReadUserData() as data: - users = [] - if with_main_user: - users.append(data["username"]) - if "users" in data: - for user in data["users"]: - users.append(user["username"]) - return users - - def post(self): - """ - Create a new user - --- - consumes: - - application/json - tags: - - Users - security: - - bearerAuth: [] - parameters: - - in: body - name: user - required: true - description: User to create - schema: - type: object - required: - - username - - password - properties: - username: - type: string - description: Unix username. Must be alphanumeric and less than 32 characters - password: - type: string - description: Unix password. - responses: - 201: - description: Created user - 400: - description: Bad request - 401: - description: Unauthorized - 409: - description: User already exists - """ - parser = reqparse.RequestParser(bundle_errors=True) - parser.add_argument("username", type=str, required=True) - parser.add_argument("password", type=str, required=True) - args = parser.parse_args() - hashing_command = ["mkpasswd", "-m", "sha-512", args["password"]] - password_hash_process_descriptor = subprocess.Popen( - hashing_command, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - hashed_password = password_hash_process_descriptor.communicate()[0] - hashed_password = hashed_password.decode("ascii") - hashed_password = hashed_password.rstrip() - # Check if username is forbidden - if is_username_forbidden(args["username"]): - return {"message": "Username is forbidden"}, 409 - # Check is username passes regex - if not re.match(r"^[a-z_][a-z0-9_]+$", args["username"]): - return {"error": "username must be alphanumeric"}, 400 - # Check if username less than 32 characters - if len(args["username"]) >= 32: - return {"error": "username must be less than 32 characters"}, 400 - - with WriteUserData() as data: - if "users" not in data: - data["users"] = [] - - # Return 409 if user already exists - if data["username"] == args["username"]: - return {"error": "User already exists"}, 409 - - for user in data["users"]: - if user["username"] == args["username"]: - return {"error": "User already exists"}, 409 - - data["users"].append( - { - "username": args["username"], - "hashedPassword": hashed_password, - } - ) - - return {"result": 0, "username": args["username"]}, 201 - - -class User(Resource): - """Single user managment""" - - def delete(self, username): - """ - Delete a user - --- - tags: - - Users - security: - - bearerAuth: [] - parameters: - - in: path - name: username - required: true - description: User to delete - type: string - responses: - 200: - description: Deleted user - 400: - description: Bad request - 401: - description: Unauthorized - 404: - description: User not found - """ - with WriteUserData() as data: - if username == data["username"]: - return {"error": "Cannot delete root user"}, 400 - # Return 400 if user does not exist - for user in 
data["users"]: - if user["username"] == username: - data["users"].remove(user) - break - else: - return {"error": "User does not exist"}, 404 - - return {"result": 0, "username": username} diff --git a/selfprivacy_api/rest/__init__.py b/selfprivacy_api/rest/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/rest/api_auth.py b/selfprivacy_api/rest/api_auth.py new file mode 100644 index 0000000..f73056c --- /dev/null +++ b/selfprivacy_api/rest/api_auth.py @@ -0,0 +1,127 @@ +from datetime import datetime +from typing import Optional +from fastapi import APIRouter, Depends, HTTPException +from pydantic import BaseModel +from selfprivacy_api.actions.api_tokens import ( + CannotDeleteCallerException, + InvalidExpirationDate, + InvalidUsesLeft, + NotFoundException, + delete_api_token, + get_api_recovery_token_status, + get_api_tokens_with_caller_flag, + get_new_api_recovery_key, + refresh_api_token, +) + +from selfprivacy_api.dependencies import TokenHeader, get_token_header + +from selfprivacy_api.utils.auth import ( + delete_new_device_auth_token, + get_new_device_auth_token, + use_mnemonic_recoverery_token, + use_new_device_auth_token, +) + +router = APIRouter( + prefix="/auth", + tags=["auth"], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/tokens") +async def rest_get_tokens(auth_token: TokenHeader = Depends(get_token_header)): + """Get the tokens info""" + return get_api_tokens_with_caller_flag(auth_token.token) + + +class DeleteTokenInput(BaseModel): + """Delete token input""" + + token_name: str + + +@router.delete("/tokens") +async def rest_delete_tokens( + token: DeleteTokenInput, auth_token: TokenHeader = Depends(get_token_header) +): + """Delete the tokens""" + try: + delete_api_token(auth_token.token, token.token_name) + except NotFoundException: + raise HTTPException(status_code=404, detail="Token not found") + except CannotDeleteCallerException: + raise HTTPException(status_code=400, detail="Cannot delete caller's token") + return {"message": "Token deleted"} + + +@router.post("/tokens") +async def rest_refresh_token(auth_token: TokenHeader = Depends(get_token_header)): + """Refresh the token""" + try: + new_token = refresh_api_token(auth_token.token) + except NotFoundException: + raise HTTPException(status_code=404, detail="Token not found") + return {"token": new_token} + + +@router.get("/recovery_token") +async def rest_get_recovery_token_status( + auth_token: TokenHeader = Depends(get_token_header), +): + return get_api_recovery_token_status() + + +class CreateRecoveryTokenInput(BaseModel): + expiration: Optional[datetime] = None + uses: Optional[int] = None + + +@router.post("/recovery_token") +async def rest_create_recovery_token( + limits: CreateRecoveryTokenInput = CreateRecoveryTokenInput(), + auth_token: TokenHeader = Depends(get_token_header), +): + try: + token = get_new_api_recovery_key(limits.expiration, limits.uses) + except InvalidExpirationDate as e: + raise HTTPException(status_code=400, detail=str(e)) + except InvalidUsesLeft as e: + raise HTTPException(status_code=400, detail=str(e)) + return {"token": token} + + +class UseTokenInput(BaseModel): + token: str + device: str + + +@router.post("/recovery_token/use") +async def rest_use_recovery_token(input: UseTokenInput): + token = use_mnemonic_recoverery_token(input.token, input.device) + if token is None: + raise HTTPException(status_code=404, detail="Token not found") + return {"token": token} + + +@router.post("/new_device") +async def 
rest_new_device(auth_token: TokenHeader = Depends(get_token_header)):
+    token = get_new_device_auth_token()
+    return {"token": token}
+
+
+@router.delete("/new_device")
+async def rest_delete_new_device_token(
+    auth_token: TokenHeader = Depends(get_token_header),
+):
+    delete_new_device_auth_token()
+    return {"token": None}
+
+
+@router.post("/new_device/authorize")
+async def rest_new_device_authorize(input: UseTokenInput):
+    token = use_new_device_auth_token(input.token, input.device)
+    if token is None:
+        raise HTTPException(status_code=404, detail="Token not found")
+    return {"message": "Device authorized", "token": token}
diff --git a/selfprivacy_api/rest/services.py b/selfprivacy_api/rest/services.py
new file mode 100644
index 0000000..c9d5ff9
--- /dev/null
+++ b/selfprivacy_api/rest/services.py
@@ -0,0 +1,373 @@
+"""Basic services legacy api"""
+import base64
+from typing import Optional
+from fastapi import APIRouter, Depends, HTTPException
+from pydantic import BaseModel
+from selfprivacy_api.actions.ssh import (
+    InvalidPublicKey,
+    KeyAlreadyExists,
+    KeyNotFound,
+    create_ssh_key,
+    enable_ssh,
+    get_ssh_settings,
+    remove_ssh_key,
+    set_ssh_settings,
+)
+from selfprivacy_api.actions.users import UserNotFound, get_user_by_username
+
+from selfprivacy_api.dependencies import get_token_header
+from selfprivacy_api.restic_controller import ResticController, ResticStates
+from selfprivacy_api.restic_controller import tasks as restic_tasks
+from selfprivacy_api.services.bitwarden import Bitwarden
+from selfprivacy_api.services.gitea import Gitea
+from selfprivacy_api.services.mailserver import MailServer
+from selfprivacy_api.services.nextcloud import Nextcloud
+from selfprivacy_api.services.ocserv import Ocserv
+from selfprivacy_api.services.pleroma import Pleroma
+from selfprivacy_api.services.service import ServiceStatus
+from selfprivacy_api.utils import WriteUserData, get_dkim_key, get_domain
+
+router = APIRouter(
+    prefix="/services",
+    tags=["services"],
+    dependencies=[Depends(get_token_header)],
+    responses={404: {"description": "Not found"}},
+)
+
+
+def service_status_to_return_code(status: ServiceStatus):
+    """Converts service status object to return code for
+    compatibility with legacy api"""
+    if status == ServiceStatus.ACTIVE:
+        return 0
+    elif status == ServiceStatus.FAILED:
+        return 1
+    elif status == ServiceStatus.INACTIVE:
+        return 3
+    elif status == ServiceStatus.OFF:
+        return 4
+    else:
+        return 2
+
+
+@router.get("/status")
+async def get_status():
+    """Get the status of the services"""
+    mail_status = MailServer.get_status()
+    bitwarden_status = Bitwarden.get_status()
+    gitea_status = Gitea.get_status()
+    nextcloud_status = Nextcloud.get_status()
+    ocserv_status = Ocserv.get_status()
+    pleroma_status = Pleroma.get_status()
+
+    return {
+        "imap": service_status_to_return_code(mail_status),
+        "smtp": service_status_to_return_code(mail_status),
+        "http": 0,
+        "bitwarden": service_status_to_return_code(bitwarden_status),
+        "gitea": service_status_to_return_code(gitea_status),
+        "nextcloud": service_status_to_return_code(nextcloud_status),
+        "ocserv": service_status_to_return_code(ocserv_status),
+        "pleroma": service_status_to_return_code(pleroma_status),
+    }
+
+
+@router.post("/bitwarden/enable")
+async def enable_bitwarden():
+    """Enable Bitwarden"""
+    Bitwarden.enable()
+    return {
+        "status": 0,
+        "message": "Bitwarden enabled",
+    }
+
+
+@router.post("/bitwarden/disable")
+async def disable_bitwarden():
+    """Disable Bitwarden"""
+    Bitwarden.disable()
+    return 
{ + "status": 0, + "message": "Bitwarden disabled", + } + + +@router.post("/gitea/enable") +async def enable_gitea(): + """Enable Gitea""" + Gitea.enable() + return { + "status": 0, + "message": "Gitea enabled", + } + + +@router.post("/gitea/disable") +async def disable_gitea(): + """Disable Gitea""" + Gitea.disable() + return { + "status": 0, + "message": "Gitea disabled", + } + + +@router.get("/mailserver/dkim") +async def get_mailserver_dkim(): + """Get the DKIM record for the mailserver""" + domain = get_domain() + + dkim = get_dkim_key(domain) + if dkim is None: + raise HTTPException(status_code=404, detail="DKIM record not found") + dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8") + return dkim + + +@router.post("/nextcloud/enable") +async def enable_nextcloud(): + """Enable Nextcloud""" + Nextcloud.enable() + return { + "status": 0, + "message": "Nextcloud enabled", + } + + +@router.post("/nextcloud/disable") +async def disable_nextcloud(): + """Disable Nextcloud""" + Nextcloud.disable() + return { + "status": 0, + "message": "Nextcloud disabled", + } + + +@router.post("/ocserv/enable") +async def enable_ocserv(): + """Enable Ocserv""" + Ocserv.enable() + return { + "status": 0, + "message": "Ocserv enabled", + } + + +@router.post("/ocserv/disable") +async def disable_ocserv(): + """Disable Ocserv""" + Ocserv.disable() + return { + "status": 0, + "message": "Ocserv disabled", + } + + +@router.post("/pleroma/enable") +async def enable_pleroma(): + """Enable Pleroma""" + Pleroma.enable() + return { + "status": 0, + "message": "Pleroma enabled", + } + + +@router.post("/pleroma/disable") +async def disable_pleroma(): + """Disable Pleroma""" + Pleroma.disable() + return { + "status": 0, + "message": "Pleroma disabled", + } + + +@router.get("/restic/backup/list") +async def get_restic_backup_list(): + restic = ResticController() + return restic.snapshot_list + + +@router.put("/restic/backup/create") +async def create_restic_backup(): + restic = ResticController() + if restic.state is ResticStates.NO_KEY: + raise HTTPException(status_code=400, detail="Backup key not provided") + if restic.state is ResticStates.INITIALIZING: + raise HTTPException(status_code=400, detail="Backup is initializing") + if restic.state is ResticStates.BACKING_UP: + raise HTTPException(status_code=409, detail="Backup is already running") + restic_tasks.start_backup() + return { + "status": 0, + "message": "Backup creation has started", + } + + +@router.get("/restic/backup/status") +async def get_restic_backup_status(): + restic = ResticController() + + return { + "status": restic.state.name, + "progress": restic.progress, + "error_message": restic.error_message, + } + + +@router.get("/restic/backup/reload") +async def reload_restic_backup(): + restic_tasks.load_snapshots() + return { + "status": 0, + "message": "Snapshots reload started", + } + + +class BackupRestoreInput(BaseModel): + backupId: str + + +@router.put("/restic/backup/restore") +async def restore_restic_backup(backup: BackupRestoreInput): + restic = ResticController() + if restic.state is ResticStates.NO_KEY: + raise HTTPException(status_code=400, detail="Backup key not provided") + if restic.state is ResticStates.NOT_INITIALIZED: + raise HTTPException( + status_code=400, detail="Backups repository is not initialized" + ) + if restic.state is ResticStates.BACKING_UP: + raise HTTPException(status_code=409, detail="Backup is already running") + if restic.state is ResticStates.INITIALIZING: + raise HTTPException(status_code=400, 
detail="Repository is initializing") + if restic.state is ResticStates.RESTORING: + raise HTTPException(status_code=409, detail="Restore is already running") + + for backup_item in restic.snapshot_list: + if backup_item["short_id"] == backup.backupId: + restic_tasks.restore_from_backup(backup.backupId) + return { + "status": 0, + "message": "Backup restoration procedure started", + } + + raise HTTPException(status_code=404, detail="Backup not found") + + +class BackblazeConfigInput(BaseModel): + accountId: str + accountKey: str + bucket: str + + +@router.put("/restic/backblaze/config") +async def set_backblaze_config(backblaze_config: BackblazeConfigInput): + with WriteUserData() as data: + if "backblaze" not in data: + data["backblaze"] = {} + data["backblaze"]["accountId"] = backblaze_config.accountId + data["backblaze"]["accountKey"] = backblaze_config.accountKey + data["backblaze"]["bucket"] = backblaze_config.bucket + + restic_tasks.update_keys_from_userdata() + + return "New Backblaze settings saved" + + +@router.post("/ssh/enable") +async def rest_enable_ssh(): + """Enable SSH""" + enable_ssh() + return { + "status": 0, + "message": "SSH enabled", + } + + +@router.get("/ssh") +async def rest_get_ssh(): + """Get the SSH configuration""" + settings = get_ssh_settings() + return { + "enable": settings.enable, + "passwordAuthentication": settings.passwordAuthentication, + } + + +class SshConfigInput(BaseModel): + enable: Optional[bool] = None + passwordAuthentication: Optional[bool] = None + + +@router.put("/ssh") +async def rest_set_ssh(ssh_config: SshConfigInput): + """Set the SSH configuration""" + set_ssh_settings(ssh_config.enable, ssh_config.passwordAuthentication) + + return "SSH settings changed" + + +class SshKeyInput(BaseModel): + public_key: str + + +@router.put("/ssh/key/send", status_code=201) +async def rest_send_ssh_key(input: SshKeyInput): + """Send the SSH key""" + try: + create_ssh_key("root", input.public_key) + except KeyAlreadyExists as error: + raise HTTPException(status_code=409, detail="Key already exists") from error + except InvalidPublicKey as error: + raise HTTPException( + status_code=400, + detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", + ) from error + + return { + "status": 0, + "message": "SSH key sent", + } + + +@router.get("/ssh/keys/{username}") +async def rest_get_ssh_keys(username: str): + """Get the SSH keys for a user""" + user = get_user_by_username(username) + if user is None: + raise HTTPException(status_code=404, detail="User not found") + + return user.ssh_keys + + +@router.post("/ssh/keys/{username}", status_code=201) +async def rest_add_ssh_key(username: str, input: SshKeyInput): + try: + create_ssh_key(username, input.public_key) + except KeyAlreadyExists as error: + raise HTTPException(status_code=409, detail="Key already exists") from error + except InvalidPublicKey as error: + raise HTTPException( + status_code=400, + detail="Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported", + ) from error + except UserNotFound as error: + raise HTTPException(status_code=404, detail="User not found") from error + + return { + "message": "New SSH key successfully written", + } + + +@router.delete("/ssh/keys/{username}") +async def rest_delete_ssh_key(username: str, input: SshKeyInput): + try: + remove_ssh_key(username, input.public_key) + except KeyNotFound as error: + raise HTTPException(status_code=404, detail="Key not found") from error + except UserNotFound as error: + raise HTTPException(status_code=404, detail="User not found") from error + return {"message": "SSH key deleted"} diff --git a/selfprivacy_api/rest/system.py b/selfprivacy_api/rest/system.py new file mode 100644 index 0000000..9933fb3 --- /dev/null +++ b/selfprivacy_api/rest/system.py @@ -0,0 +1,105 @@ +from typing import Optional +from fastapi import APIRouter, Body, Depends, HTTPException +from pydantic import BaseModel + +from selfprivacy_api.dependencies import get_token_header + +import selfprivacy_api.actions.system as system_actions + +router = APIRouter( + prefix="/system", + tags=["system"], + dependencies=[Depends(get_token_header)], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/configuration/timezone") +async def get_timezone(): + """Get the timezone of the server""" + return system_actions.get_timezone() + + +class ChangeTimezoneRequestBody(BaseModel): + """Change the timezone of the server""" + + timezone: str + + +@router.put("/configuration/timezone") +async def change_timezone(timezone: ChangeTimezoneRequestBody): + """Change the timezone of the server""" + try: + system_actions.change_timezone(timezone.timezone) + except system_actions.InvalidTimezone as e: + raise HTTPException(status_code=400, detail=str(e)) + return {"timezone": timezone.timezone} + + +@router.get("/configuration/autoUpgrade") +async def get_auto_upgrade_settings(): + """Get the auto-upgrade settings""" + return system_actions.get_auto_upgrade_settings().dict() + + +class AutoUpgradeSettings(BaseModel): + """Settings for auto-upgrading user data""" + + enable: Optional[bool] = None + allowReboot: Optional[bool] = None + + +@router.put("/configuration/autoUpgrade") +async def set_auto_upgrade_settings(settings: AutoUpgradeSettings): + """Set the auto-upgrade settings""" + system_actions.set_auto_upgrade_settings(settings.enable, settings.allowReboot) + return "Auto-upgrade settings changed" + + +@router.get("/configuration/apply") +async def apply_configuration(): + """Apply the configuration""" + return_code = system_actions.rebuild_system() + return return_code + + +@router.get("/configuration/rollback") +async def rollback_configuration(): + """Rollback the configuration""" + return_code = system_actions.rollback_system() + return return_code + + +@router.get("/configuration/upgrade") +async def upgrade_configuration(): + """Upgrade the configuration""" + return_code = system_actions.upgrade_system() + return return_code + + +@router.get("/reboot") +async def reboot_system(): + """Reboot the system""" + system_actions.reboot_system() + return "System reboot has started" + + +@router.get("/version") +async def get_system_version(): + """Get the system version""" + return {"system_version": system_actions.get_system_version()} + + +@router.get("/pythonVersion") +async def get_python_version(): + """Get the Python version""" + return system_actions.get_python_version() + + +@router.get("/configuration/pull") +async def pull_configuration(): + """Pull the 
configuration""" + action_result = system_actions.pull_repository_changes() + if action_result.status == 0: + return action_result.dict() + raise HTTPException(status_code=500, detail=action_result.dict()) diff --git a/selfprivacy_api/rest/users.py b/selfprivacy_api/rest/users.py new file mode 100644 index 0000000..ab4c6c9 --- /dev/null +++ b/selfprivacy_api/rest/users.py @@ -0,0 +1,62 @@ +"""Users management module""" +from typing import Optional +from fastapi import APIRouter, Body, Depends, HTTPException +from pydantic import BaseModel + +import selfprivacy_api.actions.users as users_actions + +from selfprivacy_api.dependencies import get_token_header + +router = APIRouter( + prefix="/users", + tags=["users"], + dependencies=[Depends(get_token_header)], + responses={404: {"description": "Not found"}}, +) + + +@router.get("") +async def get_users(withMainUser: bool = False): + """Get the list of users""" + users: list[users_actions.UserDataUser] = users_actions.get_users( + exclude_primary=not withMainUser, exclude_root=True + ) + + return [user.username for user in users] + + +class UserInput(BaseModel): + """User input""" + + username: str + password: str + + +@router.post("", status_code=201) +async def create_user(user: UserInput): + try: + users_actions.create_user(user.username, user.password) + except users_actions.PasswordIsEmpty as e: + raise HTTPException(status_code=400, detail=str(e)) + except users_actions.UsernameForbidden as e: + raise HTTPException(status_code=409, detail=str(e)) + except users_actions.UsernameNotAlphanumeric as e: + raise HTTPException(status_code=400, detail=str(e)) + except users_actions.UsernameTooLong as e: + raise HTTPException(status_code=400, detail=str(e)) + except users_actions.UserAlreadyExists as e: + raise HTTPException(status_code=409, detail=str(e)) + + return {"result": 0, "username": user.username} + + +@router.delete("/{username}") +async def delete_user(username: str): + try: + users_actions.delete_user(username) + except users_actions.UserNotFound as e: + raise HTTPException(status_code=404, detail=str(e)) + except users_actions.UserIsProtected as e: + raise HTTPException(status_code=400, detail=str(e)) + + return {"result": 0, "username": username} diff --git a/selfprivacy_api/restic_controller/tasks.py b/selfprivacy_api/restic_controller/tasks.py index 4c610c4..f583d8b 100644 --- a/selfprivacy_api/restic_controller/tasks.py +++ b/selfprivacy_api/restic_controller/tasks.py @@ -1,10 +1,8 @@ """Tasks for the restic controller.""" from huey import crontab -from huey.contrib.mini import MiniHuey +from selfprivacy_api.utils.huey import huey from . 
import ResticController, ResticStates -huey = MiniHuey() - @huey.task() def init_restic(): diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py new file mode 100644 index 0000000..a688734 --- /dev/null +++ b/selfprivacy_api/services/__init__.py @@ -0,0 +1,67 @@ +"""Services module.""" + +import typing +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.jitsi import Jitsi +from selfprivacy_api.services.mailserver import MailServer +from selfprivacy_api.services.nextcloud import Nextcloud +from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.services.ocserv import Ocserv +from selfprivacy_api.services.service import Service, ServiceDnsRecord +import selfprivacy_api.utils.network as network_utils + +services: list[Service] = [ + Bitwarden(), + Gitea(), + MailServer(), + Nextcloud(), + Pleroma(), + Ocserv(), + Jitsi(), +] + + +def get_all_services() -> list[Service]: + return services + + +def get_service_by_id(service_id: str) -> typing.Optional[Service]: + for service in services: + if service.get_id() == service_id: + return service + return None + + +def get_enabled_services() -> list[Service]: + return [service for service in services if service.is_enabled()] + + +def get_disabled_services() -> list[Service]: + return [service for service in services if not service.is_enabled()] + + +def get_services_by_location(location: str) -> list[Service]: + return [service for service in services if service.get_location() == location] + + +def get_all_required_dns_records() -> list[ServiceDnsRecord]: + ip4 = network_utils.get_ip4() + ip6 = network_utils.get_ip6() + dns_records: list[ServiceDnsRecord] = [ + ServiceDnsRecord( + type="A", + name="api", + content=ip4, + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="api", + content=ip6, + ttl=3600, + ), + ] + for service in get_enabled_services(): + dns_records += service.get_dns_records() + return dns_records diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py new file mode 100644 index 0000000..ea93de1 --- /dev/null +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -0,0 +1,174 @@ +"""Class representing Bitwarden service""" +import base64 +import subprocess +import typing + +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import get_service_status +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.huey import huey +import selfprivacy_api.utils.network as network_utils +from selfprivacy_api.services.bitwarden.icon import BITWARDEN_ICON + + +class Bitwarden(Service): + """Class representing Bitwarden service.""" + + @staticmethod + def get_id() -> str: + """Return service id.""" + return "bitwarden" + + @staticmethod + def get_display_name() -> str: + """Return service display name.""" + return "Bitwarden" + + @staticmethod + def get_description() -> str: + """Return service description.""" + return "Bitwarden is a password manager." 
+
+    @staticmethod
+    def get_svg_icon() -> str:
+        """Read SVG icon from file and return it as base64 encoded string."""
+        return base64.b64encode(BITWARDEN_ICON.encode("utf-8")).decode("utf-8")
+
+    @staticmethod
+    def get_url() -> typing.Optional[str]:
+        """Return service url."""
+        domain = get_domain()
+        return f"https://password.{domain}"
+
+    @staticmethod
+    def is_movable() -> bool:
+        return True
+
+    @staticmethod
+    def is_required() -> bool:
+        return False
+
+    @staticmethod
+    def is_enabled() -> bool:
+        with ReadUserData() as user_data:
+            return user_data.get("bitwarden", {}).get("enable", False)
+
+    @staticmethod
+    def get_status() -> ServiceStatus:
+        """
+        Return Bitwarden status from systemd.
+        Use command return code to determine status.
+
+        Return code 0 means service is running.
+        Return code 1 or 2 means service is in error state.
+        Return code 3 means service is stopped.
+        Return code 4 means service is off.
+        """
+        return get_service_status("vaultwarden.service")
+
+    @staticmethod
+    def enable():
+        """Enable Bitwarden service."""
+        with WriteUserData() as user_data:
+            if "bitwarden" not in user_data:
+                user_data["bitwarden"] = {}
+            user_data["bitwarden"]["enable"] = True
+
+    @staticmethod
+    def disable():
+        """Disable Bitwarden service."""
+        with WriteUserData() as user_data:
+            if "bitwarden" not in user_data:
+                user_data["bitwarden"] = {}
+            user_data["bitwarden"]["enable"] = False
+
+    @staticmethod
+    def stop():
+        subprocess.run(["systemctl", "stop", "vaultwarden.service"])
+
+    @staticmethod
+    def start():
+        subprocess.run(["systemctl", "start", "vaultwarden.service"])
+
+    @staticmethod
+    def restart():
+        subprocess.run(["systemctl", "restart", "vaultwarden.service"])
+
+    @staticmethod
+    def get_configuration():
+        return {}
+
+    @staticmethod
+    def set_configuration(config_items):
+        return super().set_configuration(config_items)
+
+    @staticmethod
+    def get_logs():
+        return ""
+
+    @staticmethod
+    def get_storage_usage() -> int:
+        storage_usage = 0
+        storage_usage += get_storage_usage("/var/lib/bitwarden")
+        storage_usage += get_storage_usage("/var/lib/bitwarden_rs")
+        return storage_usage
+
+    @staticmethod
+    def get_location() -> str:
+        with ReadUserData() as user_data:
+            if user_data.get("useBinds", False):
+                return user_data.get("bitwarden", {}).get("location", "sda1")
+            else:
+                return "sda1"
+
+    @staticmethod
+    def get_dns_records() -> typing.List[ServiceDnsRecord]:
+        """Return list of DNS records for Bitwarden service."""
+        return [
+            ServiceDnsRecord(
+                type="A",
+                name="password",
+                content=network_utils.get_ip4(),
+                ttl=3600,
+            ),
+            ServiceDnsRecord(
+                type="AAAA",
+                name="password",
+                content=network_utils.get_ip6(),
+                ttl=3600,
+            ),
+        ]
+
+    def move_to_volume(self, volume: BlockDevice) -> Job:
+        job = Jobs.get_instance().add(
+            type_id="services.bitwarden.move",
+            name="Move Bitwarden",
+            description=f"Moving Bitwarden data to {volume.name}",
+        )
+
+        move_service(
+            self,
+            volume,
+            job,
+            [
+                FolderMoveNames(
+                    name="bitwarden",
+                    bind_location="/var/lib/bitwarden",
+                    group="vaultwarden",
+                    owner="vaultwarden",
+                ),
+                FolderMoveNames(
+                    name="bitwarden_rs",
+                    bind_location="/var/lib/bitwarden_rs",
+                    group="vaultwarden",
+                    owner="vaultwarden",
+                ),
+            ],
+            "bitwarden",
+        )
+
+        return job
diff --git a/selfprivacy_api/services/bitwarden/bitwarden.svg b/selfprivacy_api/services/bitwarden/bitwarden.svg
new file mode 100644
index 0000000..ced270c
--- /dev/null
+++ b/selfprivacy_api/services/bitwarden/bitwarden.svg
@@ -0,0 +1,3 @@
+
+
+
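The Service subclasses above share one static interface: enable() and disable() only toggle a flag in userdata (NixOS applies it on the next rebuild), get_status() reads systemd state, and DNS records are derived from the host's current IPs. A minimal usage sketch, assuming it runs on a SelfPrivacy host where the userdata file and the vaultwarden unit actually exist:

    # Sketch under the assumptions above; not part of this changeset.
    from selfprivacy_api.services.bitwarden import Bitwarden
    from selfprivacy_api.services.service import ServiceStatus

    Bitwarden.enable()  # sets userdata["bitwarden"]["enable"] = True
    if Bitwarden.get_status() is ServiceStatus.ACTIVE:
        print(Bitwarden.get_url())  # https://password.<domain>
    for record in Bitwarden.get_dns_records():
        print(record.type, record.name, record.content, record.ttl)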
diff --git a/selfprivacy_api/services/bitwarden/icon.py b/selfprivacy_api/services/bitwarden/icon.py
new file mode 100644
index 0000000..f9280e0
--- /dev/null
+++ b/selfprivacy_api/services/bitwarden/icon.py
@@ -0,0 +1,5 @@
+BITWARDEN_ICON = """
+
+
+
+"""
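Combined with the registry in selfprivacy_api/services/__init__.py, the per-service get_dns_records() methods make the whole expected DNS zone derivable in one call. A sketch, assuming get_ip4() and get_ip6() can resolve on the host:

    from selfprivacy_api.services import get_all_required_dns_records

    # Two records for the API itself plus an A/AAAA pair per enabled service.
    for record in get_all_required_dns_records():
        print(f"{record.type:<5} {record.name:<10} {record.content} (ttl={record.ttl})")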
seconds.", + ) + return + + # Unmount old volume + Jobs.get_instance().update( + job=job, + status_text="Unmounting old folder...", + status=JobStatus.RUNNING, + progress=10, + ) + for folder in folder_names: + try: + subprocess.run( + ["umount", folder.bind_location], + check=True, + ) + except subprocess.CalledProcessError: + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error="Unable to unmount old volume.", + ) + return + # Move data to new volume and set correct permissions + Jobs.get_instance().update( + job=job, + status_text="Moving data to new volume...", + status=JobStatus.RUNNING, + progress=20, + ) + current_progress = 20 + folder_percentage = 50 // len(folder_names) + for folder in folder_names: + shutil.move( + f"/volumes/{old_volume}/{folder.name}", + f"/volumes/{volume.name}/{folder.name}", + ) + Jobs.get_instance().update( + job=job, + status_text="Moving data to new volume...", + status=JobStatus.RUNNING, + progress=current_progress + folder_percentage, + ) + + Jobs.get_instance().update( + job=job, + status_text=f"Making sure {service_name} owns its files...", + status=JobStatus.RUNNING, + progress=70, + ) + for folder in folder_names: + try: + subprocess.run( + [ + "chown", + "-R", + f"{folder.owner}:f{folder.group}", + f"/volumes/{volume.name}/{folder.name}", + ], + check=True, + ) + except subprocess.CalledProcessError as error: + print(error.output) + Jobs.get_instance().update( + job=job, + status=JobStatus.RUNNING, + error=f"Unable to set ownership of new volume. {service_name} may not be able to access its files. Continuing anyway.", + ) + return + + # Mount new volume + Jobs.get_instance().update( + job=job, + status_text=f"Mounting {service_name} data...", + status=JobStatus.RUNNING, + progress=90, + ) + + for folder in folder_names: + try: + subprocess.run( + [ + "mount", + "--bind", + f"/volumes/{volume.name}/{folder.name}", + folder.bind_location, + ], + check=True, + ) + except subprocess.CalledProcessError as error: + print(error.output) + Jobs.get_instance().update( + job=job, + status=JobStatus.ERROR, + error="Unable to mount new volume.", + ) + return + + # Update userdata + Jobs.get_instance().update( + job=job, + status_text="Finishing move...", + status=JobStatus.RUNNING, + progress=95, + ) + with WriteUserData() as user_data: + if userdata_location not in user_data: + user_data[userdata_location] = {} + user_data[userdata_location]["location"] = volume.name + # Start service + service.start() + Jobs.get_instance().update( + job=job, + status=JobStatus.FINISHED, + result=f"{service_name} moved successfully.", + status_text=f"Starting {service}...", + progress=100, + ) diff --git a/selfprivacy_api/services/generic_size_counter.py b/selfprivacy_api/services/generic_size_counter.py new file mode 100644 index 0000000..4a706fb --- /dev/null +++ b/selfprivacy_api/services/generic_size_counter.py @@ -0,0 +1,16 @@ +"""Generic size counter using pathlib""" +import pathlib + + +def get_storage_usage(path: str) -> int: + """ + Calculate the real storage usage of path and all subdirectories. + Calculate using pathlib. + Do not follow symlinks. 
diff --git a/selfprivacy_api/services/generic_size_counter.py b/selfprivacy_api/services/generic_size_counter.py
new file mode 100644
index 0000000..4a706fb
--- /dev/null
+++ b/selfprivacy_api/services/generic_size_counter.py
@@ -0,0 +1,16 @@
+"""Generic size counter using pathlib"""
+import pathlib
+
+
+def get_storage_usage(path: str) -> int:
+    """
+    Calculate the real storage usage of path and all subdirectories
+    using pathlib.
+    Symlinks are not followed.
+    """
+    storage_usage = 0
+    for iter_path in pathlib.Path(path).rglob("**/*"):
+        if iter_path.is_dir():
+            continue
+        storage_usage += iter_path.stat().st_size
+    return storage_usage
diff --git a/selfprivacy_api/services/generic_status_getter.py b/selfprivacy_api/services/generic_status_getter.py
new file mode 100644
index 0000000..46720af
--- /dev/null
+++ b/selfprivacy_api/services/generic_status_getter.py
@@ -0,0 +1,60 @@
+"""Generic service status fetcher using systemctl"""
+import subprocess
+
+from selfprivacy_api.services.service import ServiceStatus
+
+
+def get_service_status(service: str) -> ServiceStatus:
+    """
+    Return service status from systemd.
+    Use systemctl show to get the status of a service.
+    Get ActiveState from the output.
+    """
+    service_status = subprocess.check_output(["systemctl", "show", service])
+    if b"LoadState=not-found" in service_status:
+        return ServiceStatus.OFF
+    if b"ActiveState=active" in service_status:
+        return ServiceStatus.ACTIVE
+    if b"ActiveState=inactive" in service_status:
+        return ServiceStatus.INACTIVE
+    if b"ActiveState=activating" in service_status:
+        return ServiceStatus.ACTIVATING
+    if b"ActiveState=deactivating" in service_status:
+        return ServiceStatus.DEACTIVATING
+    if b"ActiveState=failed" in service_status:
+        return ServiceStatus.FAILED
+    if b"ActiveState=reloading" in service_status:
+        return ServiceStatus.RELOADING
+    return ServiceStatus.OFF
+
+
+def get_service_status_from_several_units(services: list[str]) -> ServiceStatus:
+    """
+    Fetch the status of every given unit and return the worst one.
+    Statuses from worst to best:
+    - OFF
+    - FAILED
+    - RELOADING
+    - ACTIVATING
+    - DEACTIVATING
+    - INACTIVE
+    - ACTIVE
+    """
+    service_statuses = []
+    for service in services:
+        service_statuses.append(get_service_status(service))
+    if ServiceStatus.OFF in service_statuses:
+        return ServiceStatus.OFF
+    if ServiceStatus.FAILED in service_statuses:
+        return ServiceStatus.FAILED
+    if ServiceStatus.RELOADING in service_statuses:
+        return ServiceStatus.RELOADING
+    if ServiceStatus.ACTIVATING in service_statuses:
+        return ServiceStatus.ACTIVATING
+    if ServiceStatus.DEACTIVATING in service_statuses:
+        return ServiceStatus.DEACTIVATING
+    if ServiceStatus.INACTIVE in service_statuses:
+        return ServiceStatus.INACTIVE
+    if ServiceStatus.ACTIVE in service_statuses:
+        return ServiceStatus.ACTIVE
+    return ServiceStatus.OFF
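get_service_status_from_several_units() returns the worst status across the given units, so a single failed or missing unit marks the whole service. A short sketch using the Jitsi unit names from the Jitsi service below, assuming ServiceStatus is a standard Enum so .name yields the label:

    from selfprivacy_api.services.generic_status_getter import (
        get_service_status_from_several_units,
    )

    # FAILED outranks ACTIVE: if jicofo crashed while the videobridge is
    # fine, the aggregate is FAILED.
    status = get_service_status_from_several_units(
        ["jitsi-videobridge.service", "jicofo.service"]
    )
    print(status.name)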
"gitea" + + @staticmethod + def get_display_name() -> str: + """Return service display name.""" + return "Gitea" + + @staticmethod + def get_description() -> str: + """Return service description.""" + return "Gitea is a Git forge." + + @staticmethod + def get_svg_icon() -> str: + """Read SVG icon from file and return it as base64 encoded string.""" + return base64.b64encode(GITEA_ICON.encode("utf-8")).decode("utf-8") + + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://git.{domain}" + + @staticmethod + def is_movable() -> bool: + return True + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("gitea", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + """ + Return Gitea status from systemd. + Use command return code to determine status. + Return code 0 means service is running. + Return code 1 or 2 means service is in error stat. + Return code 3 means service is stopped. + Return code 4 means service is off. + """ + return get_service_status("gitea.service") + + @staticmethod + def enable(): + """Enable Gitea service.""" + with WriteUserData() as user_data: + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["enable"] = True + + @staticmethod + def disable(): + """Disable Gitea service.""" + with WriteUserData() as user_data: + if "gitea" not in user_data: + user_data["gitea"] = {} + user_data["gitea"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "gitea.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "gitea.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "gitea.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + storage_usage += get_storage_usage("/var/lib/gitea") + return storage_usage + + @staticmethod + def get_location() -> str: + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("gitea", {}).get("location", "sda1") + else: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + return [ + ServiceDnsRecord( + type="A", + name="git", + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="git", + content=network_utils.get_ip6(), + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + job = Jobs.get_instance().add( + type_id="services.gitea.move", + name="Move Gitea", + description=f"Moving Gitea data to {volume.name}", + ) + + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="gitea", + bind_location="/var/lib/gitea", + group="gitea", + owner="gitea", + ), + ], + "bitwarden", + ) + + return job diff --git a/selfprivacy_api/services/gitea/gitea.svg b/selfprivacy_api/services/gitea/gitea.svg new file mode 100644 index 0000000..9ba8a76 --- /dev/null +++ b/selfprivacy_api/services/gitea/gitea.svg @@ -0,0 +1,3 @@ + + + diff --git a/selfprivacy_api/services/gitea/icon.py b/selfprivacy_api/services/gitea/icon.py new file mode 100644 index 0000000..569f96a --- /dev/null +++ b/selfprivacy_api/services/gitea/icon.py @@ -0,0 +1,5 @@ 
+GITEA_ICON = """ + + + +""" diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py new file mode 100644 index 0000000..6b3a973 --- /dev/null +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -0,0 +1,142 @@ +"""Class representing Jitsi service""" +import base64 +import subprocess +import typing + +from selfprivacy_api.jobs import Job, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import ( + get_service_status, + get_service_status_from_several_units, +) +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.huey import huey +import selfprivacy_api.utils.network as network_utils +from selfprivacy_api.services.jitsi.icon import JITSI_ICON + + +class Jitsi(Service): + """Class representing Jitsi service""" + + @staticmethod + def get_id() -> str: + """Return service id.""" + return "jitsi" + + @staticmethod + def get_display_name() -> str: + """Return service display name.""" + return "Jitsi" + + @staticmethod + def get_description() -> str: + """Return service description.""" + return "Jitsi is a free and open-source video conferencing solution." + + @staticmethod + def get_svg_icon() -> str: + """Read SVG icon from file and return it as base64 encoded string.""" + return base64.b64encode(JITSI_ICON.encode("utf-8")).decode("utf-8") + + @staticmethod + def get_url() -> typing.Optional[str]: + """Return service url.""" + domain = get_domain() + return f"https://meet.{domain}" + + @staticmethod + def is_movable() -> bool: + return False + + @staticmethod + def is_required() -> bool: + return False + + @staticmethod + def is_enabled() -> bool: + with ReadUserData() as user_data: + return user_data.get("jitsi", {}).get("enable", False) + + @staticmethod + def get_status() -> ServiceStatus: + return get_service_status_from_several_units( + ["jitsi-videobridge.service", "jicofo.service"] + ) + + @staticmethod + def enable(): + """Enable Jitsi service.""" + with WriteUserData() as user_data: + if "jitsi" not in user_data: + user_data["jitsi"] = {} + user_data["jitsi"]["enable"] = True + + @staticmethod + def disable(): + """Disable Gitea service.""" + with WriteUserData() as user_data: + if "jitsi" not in user_data: + user_data["jitsi"] = {} + user_data["jitsi"]["enable"] = False + + @staticmethod + def stop(): + subprocess.run(["systemctl", "stop", "jitsi-videobridge.service"]) + subprocess.run(["systemctl", "stop", "jicofo.service"]) + + @staticmethod + def start(): + subprocess.run(["systemctl", "start", "jitsi-videobridge.service"]) + subprocess.run(["systemctl", "start", "jicofo.service"]) + + @staticmethod + def restart(): + subprocess.run(["systemctl", "restart", "jitsi-videobridge.service"]) + subprocess.run(["systemctl", "restart", "jicofo.service"]) + + @staticmethod + def get_configuration(): + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + return "" + + @staticmethod + def get_storage_usage() -> int: + storage_usage = 0 + storage_usage += get_storage_usage("/var/lib/jitsi-meet") + return storage_usage + + @staticmethod + def get_location() -> str: + return "sda1" + + 
@staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + ip4 = network_utils.get_ip4() + ip6 = network_utils.get_ip6() + return [ + ServiceDnsRecord( + type="A", + name="meet", + content=ip4, + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="meet", + content=ip6, + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + raise NotImplementedError("jitsi service is not movable") diff --git a/selfprivacy_api/services/jitsi/icon.py b/selfprivacy_api/services/jitsi/icon.py new file mode 100644 index 0000000..08bcbb1 --- /dev/null +++ b/selfprivacy_api/services/jitsi/icon.py @@ -0,0 +1,5 @@ +JITSI_ICON = """ + + + +""" diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py new file mode 100644 index 0000000..1a72f33 --- /dev/null +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -0,0 +1,179 @@ +"""Class representing Dovecot and Postfix services""" + +import base64 +import subprocess +import typing + +from selfprivacy_api.jobs import Job, JobStatus, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import ( + get_service_status, + get_service_status_from_several_units, +) +from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus +import selfprivacy_api.utils as utils +from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.utils.huey import huey +import selfprivacy_api.utils.network as network_utils +from selfprivacy_api.services.mailserver.icon import MAILSERVER_ICON + + +class MailServer(Service): + """Class representing mail service""" + + @staticmethod + def get_id() -> str: + return "mailserver" + + @staticmethod + def get_display_name() -> str: + return "Mail Server" + + @staticmethod + def get_description() -> str: + return "E-Mail for company and family." 
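Jitsi above, and the mail server below, report a combined status through get_service_status_from_several_units, whose implementation is also outside this diff. A minimal sketch of a plausible aggregation policy, assuming worst-status-wins semantics (the function name here is illustrative, not the real helper):

import typing

from selfprivacy_api.services.service import ServiceStatus


def aggregate_unit_statuses(statuses: typing.List[ServiceStatus]) -> ServiceStatus:
    # Assumed policy: one failed unit fails the composite service, and the
    # composite is ACTIVE only when every unit is ACTIVE.
    if ServiceStatus.FAILED in statuses:
        return ServiceStatus.FAILED
    if ServiceStatus.ACTIVATING in statuses:
        return ServiceStatus.ACTIVATING
    if all(status == ServiceStatus.ACTIVE for status in statuses):
        return ServiceStatus.ACTIVE
    return ServiceStatus.INACTIVE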
diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py
new file mode 100644
index 0000000..1a72f33
--- /dev/null
+++ b/selfprivacy_api/services/mailserver/__init__.py
@@ -0,0 +1,179 @@
+"""Class representing Dovecot and Postfix services"""
+
+import base64
+import subprocess
+import typing
+
+from selfprivacy_api.jobs import Job, JobStatus, Jobs
+from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
+from selfprivacy_api.services.generic_size_counter import get_storage_usage
+from selfprivacy_api.services.generic_status_getter import (
+    get_service_status,
+    get_service_status_from_several_units,
+)
+from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
+import selfprivacy_api.utils as utils
+from selfprivacy_api.utils.block_devices import BlockDevice
+from selfprivacy_api.utils.huey import huey
+import selfprivacy_api.utils.network as network_utils
+from selfprivacy_api.services.mailserver.icon import MAILSERVER_ICON
+
+
+class MailServer(Service):
+    """Class representing mail service"""
+
+    @staticmethod
+    def get_id() -> str:
+        return "mailserver"
+
+    @staticmethod
+    def get_display_name() -> str:
+        return "Mail Server"
+
+    @staticmethod
+    def get_description() -> str:
+        return "E-Mail for company and family."
+
+    @staticmethod
+    def get_svg_icon() -> str:
+        return base64.b64encode(MAILSERVER_ICON.encode("utf-8")).decode("utf-8")
+
+    @staticmethod
+    def get_url() -> typing.Optional[str]:
+        """Return service url."""
+        return None
+
+    @staticmethod
+    def is_movable() -> bool:
+        return True
+
+    @staticmethod
+    def is_required() -> bool:
+        return True
+
+    @staticmethod
+    def is_enabled() -> bool:
+        return True
+
+    @staticmethod
+    def get_status() -> ServiceStatus:
+        return get_service_status_from_several_units(
+            ["dovecot2.service", "postfix.service"]
+        )
+
+    @staticmethod
+    def enable():
+        raise NotImplementedError("enable is not implemented for MailServer")
+
+    @staticmethod
+    def disable():
+        raise NotImplementedError("disable is not implemented for MailServer")
+
+    @staticmethod
+    def stop():
+        subprocess.run(["systemctl", "stop", "dovecot2.service"])
+        subprocess.run(["systemctl", "stop", "postfix.service"])
+
+    @staticmethod
+    def start():
+        subprocess.run(["systemctl", "start", "dovecot2.service"])
+        subprocess.run(["systemctl", "start", "postfix.service"])
+
+    @staticmethod
+    def restart():
+        subprocess.run(["systemctl", "restart", "dovecot2.service"])
+        subprocess.run(["systemctl", "restart", "postfix.service"])
+
+    @staticmethod
+    def get_configuration():
+        return {}
+
+    @staticmethod
+    def set_configuration(config_items):
+        return super().set_configuration(config_items)
+
+    @staticmethod
+    def get_logs():
+        return ""
+
+    @staticmethod
+    def get_storage_usage() -> int:
+        return get_storage_usage("/var/vmail")
+
+    @staticmethod
+    def get_location() -> str:
+        with utils.ReadUserData() as user_data:
+            if user_data.get("useBinds", False):
+                return user_data.get("mailserver", {}).get("location", "sda1")
+            else:
+                return "sda1"
+
+    @staticmethod
+    def get_dns_records() -> typing.List[ServiceDnsRecord]:
+        domain = utils.get_domain()
+        dkim_record = utils.get_dkim_key(domain)
+        ip4 = network_utils.get_ip4()
+        ip6 = network_utils.get_ip6()
+
+        if dkim_record is None:
+            return []
+
+        return [
+            ServiceDnsRecord(
+                type="A",
+                name=domain,
+                content=ip4,
+                ttl=3600,
+            ),
+            ServiceDnsRecord(
+                type="AAAA",
+                name=domain,
+                content=ip6,
+                ttl=3600,
+            ),
+            ServiceDnsRecord(
+                type="MX", name=domain, content=domain, ttl=3600, priority=10
+            ),
+            ServiceDnsRecord(
+                type="TXT", name="_dmarc", content="v=DMARC1; p=none", ttl=18000
+            ),
+            ServiceDnsRecord(
+                type="TXT",
+                name=domain,
+                content=f"v=spf1 a mx ip4:{ip4} -all",
+                ttl=18000,
+            ),
+            ServiceDnsRecord(
+                type="TXT", name="selector._domainkey", content=dkim_record, ttl=18000
+            ),
+        ]
+
+    def move_to_volume(self, volume: BlockDevice) -> Job:
+        job = Jobs.get_instance().add(
+            type_id="services.mailserver.move",
+            name="Move Mail Server",
+            description=f"Moving mailserver data to {volume.name}",
+        )
+
+        move_service(
+            self,
+            volume,
+            job,
+            [
+                FolderMoveNames(
+                    name="vmail",
+                    bind_location="/var/vmail",
+                    group="virtualMail",
+                    owner="virtualMail",
+                ),
+                FolderMoveNames(
+                    name="sieve",
+                    bind_location="/var/sieve",
+                    group="virtualMail",
+                    owner="virtualMail",
+                ),
+            ],
+            "mailserver",
+        )
+
+        return job
diff --git a/selfprivacy_api/services/mailserver/icon.py b/selfprivacy_api/services/mailserver/icon.py
new file mode 100644
index 0000000..a688ef3
--- /dev/null
+++ b/selfprivacy_api/services/mailserver/icon.py
@@ -0,0 +1,5 @@
+MAILSERVER_ICON = """
+
+
+
+"""
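To make the mail DNS set above concrete: for a hypothetical domain example.org with IPv4 203.0.113.1 and a DKIM key published at /var/dkim/example.org.selector.txt, MailServer.get_dns_records() would yield, besides the A/AAAA records for example.org, roughly:

    MX   example.org          -> example.org (priority 10, TTL 3600)
    TXT  _dmarc               -> "v=DMARC1; p=none" (TTL 18000)
    TXT  example.org          -> "v=spf1 a mx ip4:203.0.113.1 -all" (TTL 18000)
    TXT  selector._domainkey  -> contents of the DKIM selector file (TTL 18000)

Note that when the DKIM file is missing, get_dkim_key() returns None and get_dns_records() returns an empty list, so no mail records are published at all.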
diff --git a/selfprivacy_api/services/mailserver/mailserver.svg b/selfprivacy_api/services/mailserver/mailserver.svg
new file mode 100644
index 0000000..d7d0ee2
--- /dev/null
+++ b/selfprivacy_api/services/mailserver/mailserver.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py
new file mode 100644
index 0000000..4057b49
--- /dev/null
+++ b/selfprivacy_api/services/nextcloud/__init__.py
@@ -0,0 +1,171 @@
+"""Class representing Nextcloud service."""
+import base64
+import subprocess
+import typing
+from selfprivacy_api.jobs import Job, Jobs
+from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
+from selfprivacy_api.services.generic_size_counter import get_storage_usage
+from selfprivacy_api.services.generic_status_getter import get_service_status
+from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
+from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
+from selfprivacy_api.utils.block_devices import BlockDevice
+import selfprivacy_api.utils.network as network_utils
+from selfprivacy_api.services.nextcloud.icon import NEXTCLOUD_ICON
+
+
+class Nextcloud(Service):
+    """Class representing Nextcloud service."""
+
+    @staticmethod
+    def get_id() -> str:
+        """Return service id."""
+        return "nextcloud"
+
+    @staticmethod
+    def get_display_name() -> str:
+        """Return service display name."""
+        return "Nextcloud"
+
+    @staticmethod
+    def get_description() -> str:
+        """Return service description."""
+        return "Nextcloud is a cloud storage service that offers a web interface and a desktop client."
+
+    @staticmethod
+    def get_svg_icon() -> str:
+        """Read SVG icon from file and return it as base64 encoded string."""
+        return base64.b64encode(NEXTCLOUD_ICON.encode("utf-8")).decode("utf-8")
+
+    @staticmethod
+    def get_url() -> typing.Optional[str]:
+        """Return service url."""
+        domain = get_domain()
+        return f"https://cloud.{domain}"
+
+    @staticmethod
+    def is_movable() -> bool:
+        return True
+
+    @staticmethod
+    def is_required() -> bool:
+        return False
+
+    @staticmethod
+    def is_enabled() -> bool:
+        with ReadUserData() as user_data:
+            return user_data.get("nextcloud", {}).get("enable", False)
+
+    @staticmethod
+    def get_status() -> ServiceStatus:
+        """
+        Return Nextcloud status from systemd.
+        Use command return code to determine status.
+
+        Return code 0 means service is running.
+        Return code 1 or 2 means service is in error state.
+        Return code 3 means service is stopped.
+        Return code 4 means service is off.
+        """
+        return get_service_status("phpfpm-nextcloud.service")
+ """ + return get_service_status("phpfpm-nextcloud.service") + + @staticmethod + def enable(): + """Enable Nextcloud service.""" + with WriteUserData() as user_data: + if "nextcloud" not in user_data: + user_data["nextcloud"] = {} + user_data["nextcloud"]["enable"] = True + + @staticmethod + def disable(): + """Disable Nextcloud service.""" + with WriteUserData() as user_data: + if "nextcloud" not in user_data: + user_data["nextcloud"] = {} + user_data["nextcloud"]["enable"] = False + + @staticmethod + def stop(): + """Stop Nextcloud service.""" + subprocess.Popen(["systemctl", "stop", "phpfpm-nextcloud.service"]) + + @staticmethod + def start(): + """Start Nextcloud service.""" + subprocess.Popen(["systemctl", "start", "phpfpm-nextcloud.service"]) + + @staticmethod + def restart(): + """Restart Nextcloud service.""" + subprocess.Popen(["systemctl", "restart", "phpfpm-nextcloud.service"]) + + @staticmethod + def get_configuration() -> dict: + """Return Nextcloud configuration.""" + return {} + + @staticmethod + def set_configuration(config_items): + return super().set_configuration(config_items) + + @staticmethod + def get_logs(): + """Return Nextcloud logs.""" + return "" + + @staticmethod + def get_storage_usage() -> int: + """ + Calculate the real storage usage of /var/lib/nextcloud and all subdirectories. + Calculate using pathlib. + Do not follow symlinks. + """ + return get_storage_usage("/var/lib/nextcloud") + + @staticmethod + def get_location() -> str: + """Get the name of disk where Nextcloud is installed.""" + with ReadUserData() as user_data: + if user_data.get("useBinds", False): + return user_data.get("nextcloud", {}).get("location", "sda1") + else: + return "sda1" + + @staticmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + return [ + ServiceDnsRecord( + type="A", + name="cloud", + content=network_utils.get_ip4(), + ttl=3600, + ), + ServiceDnsRecord( + type="AAAA", + name="cloud", + content=network_utils.get_ip6(), + ttl=3600, + ), + ] + + def move_to_volume(self, volume: BlockDevice) -> Job: + job = Jobs.get_instance().add( + type_id="services.nextcloud.move", + name="Move Nextcloud", + description=f"Moving Nextcloud to volume {volume.name}", + ) + move_service( + self, + volume, + job, + [ + FolderMoveNames( + name="nextcloud", + bind_location="/var/lib/nextcloud", + owner="nextcloud", + group="nextcloud", + ), + ], + "nextcloud", + ) + return job diff --git a/selfprivacy_api/services/nextcloud/icon.py b/selfprivacy_api/services/nextcloud/icon.py new file mode 100644 index 0000000..d178640 --- /dev/null +++ b/selfprivacy_api/services/nextcloud/icon.py @@ -0,0 +1,12 @@ +NEXTCLOUD_ICON = """ + + + + + + + + + + +""" diff --git a/selfprivacy_api/services/nextcloud/nextcloud.svg b/selfprivacy_api/services/nextcloud/nextcloud.svg new file mode 100644 index 0000000..d7dbcb5 --- /dev/null +++ b/selfprivacy_api/services/nextcloud/nextcloud.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py new file mode 100644 index 0000000..dcfacaa --- /dev/null +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -0,0 +1,121 @@ +"""Class representing ocserv service.""" +import base64 +import subprocess +import typing +from selfprivacy_api.jobs import Job, Jobs +from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service +from selfprivacy_api.services.generic_size_counter import get_storage_usage +from selfprivacy_api.services.generic_status_getter import 
diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py
new file mode 100644
index 0000000..dcfacaa
--- /dev/null
+++ b/selfprivacy_api/services/ocserv/__init__.py
@@ -0,0 +1,121 @@
+"""Class representing ocserv service."""
+import base64
+import subprocess
+import typing
+from selfprivacy_api.jobs import Job, Jobs
+from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
+from selfprivacy_api.services.generic_size_counter import get_storage_usage
+from selfprivacy_api.services.generic_status_getter import get_service_status
+from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
+from selfprivacy_api.utils import ReadUserData, WriteUserData
+from selfprivacy_api.utils.block_devices import BlockDevice
+from selfprivacy_api.services.ocserv.icon import OCSERV_ICON
+import selfprivacy_api.utils.network as network_utils
+
+
+class Ocserv(Service):
+    """Class representing ocserv service."""
+
+    @staticmethod
+    def get_id() -> str:
+        return "ocserv"
+
+    @staticmethod
+    def get_display_name() -> str:
+        return "OpenConnect VPN"
+
+    @staticmethod
+    def get_description() -> str:
+        return "OpenConnect VPN to connect your devices and access the internet."
+
+    @staticmethod
+    def get_svg_icon() -> str:
+        return base64.b64encode(OCSERV_ICON.encode("utf-8")).decode("utf-8")
+
+    @staticmethod
+    def get_url() -> typing.Optional[str]:
+        """Return service url."""
+        return None
+
+    @staticmethod
+    def is_movable() -> bool:
+        return False
+
+    @staticmethod
+    def is_required() -> bool:
+        return False
+
+    @staticmethod
+    def is_enabled() -> bool:
+        with ReadUserData() as user_data:
+            return user_data.get("ocserv", {}).get("enable", False)
+
+    @staticmethod
+    def get_status() -> ServiceStatus:
+        return get_service_status("ocserv.service")
+
+    @staticmethod
+    def enable():
+        with WriteUserData() as user_data:
+            if "ocserv" not in user_data:
+                user_data["ocserv"] = {}
+            user_data["ocserv"]["enable"] = True
+
+    @staticmethod
+    def disable():
+        with WriteUserData() as user_data:
+            if "ocserv" not in user_data:
+                user_data["ocserv"] = {}
+            user_data["ocserv"]["enable"] = False
+
+    @staticmethod
+    def stop():
+        subprocess.run(["systemctl", "stop", "ocserv.service"])
+
+    @staticmethod
+    def start():
+        subprocess.run(["systemctl", "start", "ocserv.service"])
+
+    @staticmethod
+    def restart():
+        subprocess.run(["systemctl", "restart", "ocserv.service"])
+
+    @staticmethod
+    def get_configuration():
+        return {}
+
+    @staticmethod
+    def set_configuration(config_items):
+        return super().set_configuration(config_items)
+
+    @staticmethod
+    def get_logs():
+        return ""
+
+    @staticmethod
+    def get_location() -> str:
+        return "sda1"
+
+    @staticmethod
+    def get_dns_records() -> typing.List[ServiceDnsRecord]:
+        return [
+            ServiceDnsRecord(
+                type="A",
+                name="vpn",
+                content=network_utils.get_ip4(),
+                ttl=3600,
+            ),
+            ServiceDnsRecord(
+                type="AAAA",
+                name="vpn",
+                content=network_utils.get_ip6(),
+                ttl=3600,
+            ),
+        ]
+
+    @staticmethod
+    def get_storage_usage() -> int:
+        return 0
+
+    def move_to_volume(self, volume: BlockDevice) -> Job:
+        raise NotImplementedError("ocserv service is not movable")
diff --git a/selfprivacy_api/services/ocserv/icon.py b/selfprivacy_api/services/ocserv/icon.py
new file mode 100644
index 0000000..6585c5e
--- /dev/null
+++ b/selfprivacy_api/services/ocserv/icon.py
@@ -0,0 +1,5 @@
+OCSERV_ICON = """
+
+
+
+"""
diff --git a/selfprivacy_api/services/ocserv/ocserv.svg b/selfprivacy_api/services/ocserv/ocserv.svg
new file mode 100644
index 0000000..288f743
--- /dev/null
+++ b/selfprivacy_api/services/ocserv/ocserv.svg
@@ -0,0 +1,3 @@
+
+
+
diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py
new file mode 100644
index 0000000..97c11f5
--- /dev/null
+++ b/selfprivacy_api/services/pleroma/__init__.py
@@ -0,0 +1,157 @@
+"""Class representing Pleroma service."""
+import base64
+import subprocess
+import typing
+from selfprivacy_api.jobs import Job, Jobs
+from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
+from selfprivacy_api.services.generic_size_counter import get_storage_usage
+from selfprivacy_api.services.generic_status_getter import get_service_status
+from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
+from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
+from selfprivacy_api.utils.block_devices import BlockDevice
+import selfprivacy_api.utils.network as network_utils
+from selfprivacy_api.services.pleroma.icon import PLEROMA_ICON
+
+
+class Pleroma(Service):
+    """Class representing Pleroma service."""
+
+    @staticmethod
+    def get_id() -> str:
+        return "pleroma"
+
+    @staticmethod
+    def get_display_name() -> str:
+        return "Pleroma"
+
+    @staticmethod
+    def get_description() -> str:
+        return "Pleroma is a fediverse microblogging service with a web interface."
+
+    @staticmethod
+    def get_svg_icon() -> str:
+        return base64.b64encode(PLEROMA_ICON.encode("utf-8")).decode("utf-8")
+
+    @staticmethod
+    def get_url() -> typing.Optional[str]:
+        """Return service url."""
+        domain = get_domain()
+        return f"https://social.{domain}"
+
+    @staticmethod
+    def is_movable() -> bool:
+        return True
+
+    @staticmethod
+    def is_required() -> bool:
+        return False
+
+    @staticmethod
+    def is_enabled() -> bool:
+        with ReadUserData() as user_data:
+            return user_data.get("pleroma", {}).get("enable", False)
+
+    @staticmethod
+    def get_status() -> ServiceStatus:
+        return get_service_status("pleroma.service")
+
+    @staticmethod
+    def enable():
+        with WriteUserData() as user_data:
+            if "pleroma" not in user_data:
+                user_data["pleroma"] = {}
+            user_data["pleroma"]["enable"] = True
+
+    @staticmethod
+    def disable():
+        with WriteUserData() as user_data:
+            if "pleroma" not in user_data:
+                user_data["pleroma"] = {}
+            user_data["pleroma"]["enable"] = False
+
+    @staticmethod
+    def stop():
+        subprocess.run(["systemctl", "stop", "pleroma.service"])
+        subprocess.run(["systemctl", "stop", "postgresql.service"])
+
+    @staticmethod
+    def start():
+        subprocess.run(["systemctl", "start", "pleroma.service"])
+        subprocess.run(["systemctl", "start", "postgresql.service"])
+
+    @staticmethod
+    def restart():
+        subprocess.run(["systemctl", "restart", "pleroma.service"])
+        subprocess.run(["systemctl", "restart", "postgresql.service"])
+
+    @staticmethod
+    def get_configuration():
+        return {}
+
+    @staticmethod
+    def set_configuration(config_items):
+        return super().set_configuration(config_items)
+
+    @staticmethod
+    def get_logs():
+        return ""
+
+    @staticmethod
+    def get_storage_usage() -> int:
+        storage_usage = 0
+        storage_usage += get_storage_usage("/var/lib/pleroma")
+        storage_usage += get_storage_usage("/var/lib/postgresql")
+        return storage_usage
+
+    @staticmethod
+    def get_location() -> str:
+        with ReadUserData() as user_data:
+            if user_data.get("useBinds", False):
+                return user_data.get("pleroma", {}).get("location", "sda1")
+            else:
+                return "sda1"
+
+    @staticmethod
+    def get_dns_records() -> typing.List[ServiceDnsRecord]:
+        return [
+            ServiceDnsRecord(
+                type="A",
+                name="social",
+                content=network_utils.get_ip4(),
+                ttl=3600,
+            ),
+            ServiceDnsRecord(
+                type="AAAA",
+                name="social",
+                content=network_utils.get_ip6(),
+                ttl=3600,
+            ),
+        ]
+
+    def move_to_volume(self, volume: BlockDevice) -> Job:
+        job = Jobs.get_instance().add(
+            type_id="services.pleroma.move",
+            name="Move Pleroma",
+            description=f"Moving Pleroma to volume {volume.name}",
+        )
+        move_service(
+            self,
+            volume,
+            job,
+            [
+                FolderMoveNames(
+                    name="pleroma",
+                    
bind_location="/var/lib/pleroma", + owner="pleroma", + group="pleroma", + ), + FolderMoveNames( + name="postgresql", + bind_location="/var/lib/postgresql", + owner="postgres", + group="postgres", + ), + ], + "pleroma", + ) + return job diff --git a/selfprivacy_api/services/pleroma/icon.py b/selfprivacy_api/services/pleroma/icon.py new file mode 100644 index 0000000..c0c4d2b --- /dev/null +++ b/selfprivacy_api/services/pleroma/icon.py @@ -0,0 +1,12 @@ +PLEROMA_ICON = """ + + + + + + + + + + +""" diff --git a/selfprivacy_api/services/pleroma/pleroma.svg b/selfprivacy_api/services/pleroma/pleroma.svg new file mode 100644 index 0000000..f87c438 --- /dev/null +++ b/selfprivacy_api/services/pleroma/pleroma.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py new file mode 100644 index 0000000..515e28f --- /dev/null +++ b/selfprivacy_api/services/service.py @@ -0,0 +1,140 @@ +"""Abstract class for a service running on a server""" +from abc import ABC, abstractmethod +from enum import Enum +import typing + +from pydantic import BaseModel +from selfprivacy_api.jobs import Job + +from selfprivacy_api.utils.block_devices import BlockDevice + + +class ServiceStatus(Enum): + """Enum for service status""" + + ACTIVE = "ACTIVE" + RELOADING = "RELOADING" + INACTIVE = "INACTIVE" + FAILED = "FAILED" + ACTIVATING = "ACTIVATING" + DEACTIVATING = "DEACTIVATING" + OFF = "OFF" + + +class ServiceDnsRecord(BaseModel): + type: str + name: str + content: str + ttl: int + priority: typing.Optional[int] = None + + +class Service(ABC): + """ + Service here is some software that is hosted on the server and + can be installed, configured and used by a user. + """ + + @staticmethod + @abstractmethod + def get_id() -> str: + pass + + @staticmethod + @abstractmethod + def get_display_name() -> str: + pass + + @staticmethod + @abstractmethod + def get_description() -> str: + pass + + @staticmethod + @abstractmethod + def get_svg_icon() -> str: + pass + + @staticmethod + @abstractmethod + def get_url() -> typing.Optional[str]: + pass + + @staticmethod + @abstractmethod + def is_movable() -> bool: + pass + + @staticmethod + @abstractmethod + def is_required() -> bool: + pass + + @staticmethod + @abstractmethod + def is_enabled() -> bool: + pass + + @staticmethod + @abstractmethod + def get_status() -> ServiceStatus: + pass + + @staticmethod + @abstractmethod + def enable(): + pass + + @staticmethod + @abstractmethod + def disable(): + pass + + @staticmethod + @abstractmethod + def stop(): + pass + + @staticmethod + @abstractmethod + def start(): + pass + + @staticmethod + @abstractmethod + def restart(): + pass + + @staticmethod + @abstractmethod + def get_configuration(): + pass + + @staticmethod + @abstractmethod + def set_configuration(config_items): + pass + + @staticmethod + @abstractmethod + def get_logs(): + pass + + @staticmethod + @abstractmethod + def get_storage_usage() -> int: + pass + + @staticmethod + @abstractmethod + def get_dns_records() -> typing.List[ServiceDnsRecord]: + pass + + @staticmethod + @abstractmethod + def get_location() -> str: + pass + + @abstractmethod + def move_to_volume(self, volume: BlockDevice) -> Job: + pass diff --git a/selfprivacy_api/task_registry.py b/selfprivacy_api/task_registry.py new file mode 100644 index 0000000..82eaf06 --- /dev/null +++ b/selfprivacy_api/task_registry.py @@ -0,0 +1,4 @@ +from selfprivacy_api.utils.huey import huey +from selfprivacy_api.jobs.test import test_job +from 
selfprivacy_api.restic_controller.tasks import * +from selfprivacy_api.services.generic_service_mover import move_service diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 5322fae..83213d7 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -1,12 +1,16 @@ #!/usr/bin/env python3 """Various utility functions""" +import datetime from enum import Enum import json +import os +import subprocess import portalocker USERDATA_FILE = "/etc/nixos/userdata/userdata.json" TOKENS_FILE = "/etc/nixos/userdata/tokens.json" +JOBS_FILE = "/etc/nixos/userdata/jobs.json" DOMAIN_FILE = "/var/domain" @@ -15,6 +19,7 @@ class UserDataFiles(Enum): USERDATA = 0 TOKENS = 1 + JOBS = 2 def get_domain(): @@ -32,6 +37,12 @@ class WriteUserData(object): self.userdata_file = open(USERDATA_FILE, "r+", encoding="utf-8") elif file_type == UserDataFiles.TOKENS: self.userdata_file = open(TOKENS_FILE, "r+", encoding="utf-8") + elif file_type == UserDataFiles.JOBS: + # Make sure file exists + if not os.path.exists(JOBS_FILE): + with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: + jobs_file.write("{}") + self.userdata_file = open(JOBS_FILE, "r+", encoding="utf-8") else: raise ValueError("Unknown file type") portalocker.lock(self.userdata_file, portalocker.LOCK_EX) @@ -57,12 +68,18 @@ class ReadUserData(object): self.userdata_file = open(USERDATA_FILE, "r", encoding="utf-8") elif file_type == UserDataFiles.TOKENS: self.userdata_file = open(TOKENS_FILE, "r", encoding="utf-8") + elif file_type == UserDataFiles.JOBS: + # Make sure file exists + if not os.path.exists(JOBS_FILE): + with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: + jobs_file.write("{}") + self.userdata_file = open(JOBS_FILE, "r", encoding="utf-8") else: raise ValueError("Unknown file type") portalocker.lock(self.userdata_file, portalocker.LOCK_SH) self.data = json.load(self.userdata_file) - def __enter__(self): + def __enter__(self) -> dict: return self.data def __exit__(self, *args): @@ -119,3 +136,54 @@ def is_username_forbidden(username): return True return False + + +def parse_date(date_str: str) -> datetime.datetime: + """Parse date string which can be in one of these formats: + - %Y-%m-%dT%H:%M:%S.%fZ + - %Y-%m-%dT%H:%M:%S.%f + - %Y-%m-%d %H:%M:%S.%fZ + - %Y-%m-%d %H:%M:%S.%f + """ + try: + return datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%fZ") + except ValueError: + pass + try: + return datetime.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S.%f") + except ValueError: + pass + try: + return datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%fZ") + except ValueError: + pass + try: + return datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%f") + except ValueError: + pass + raise ValueError("Invalid date string") + + +def get_dkim_key(domain): + """Get DKIM key from /var/dkim/.selector.txt""" + if os.path.exists("/var/dkim/" + domain + ".selector.txt"): + cat_process = subprocess.Popen( + ["cat", "/var/dkim/" + domain + ".selector.txt"], stdout=subprocess.PIPE + ) + dkim = cat_process.communicate()[0] + return str(dkim, "utf-8") + return None + + +def hash_password(password): + hashing_command = ["mkpasswd", "-m", "sha-512", password] + password_hash_process_descriptor = subprocess.Popen( + hashing_command, + shell=False, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + hashed_password = password_hash_process_descriptor.communicate()[0] + hashed_password = hashed_password.decode("ascii") + hashed_password = hashed_password.rstrip() + 
return hashed_password
diff --git a/selfprivacy_api/utils/auth.py b/selfprivacy_api/utils/auth.py
index 8e5a5b2..ecaf9af 100644
--- a/selfprivacy_api/utils/auth.py
+++ b/selfprivacy_api/utils/auth.py
@@ -3,10 +3,12 @@
 import secrets
 from datetime import datetime, timedelta
 import re
+import typing
+from pydantic import BaseModel
 from mnemonic import Mnemonic
 
-from . import ReadUserData, UserDataFiles, WriteUserData
+from . import ReadUserData, UserDataFiles, WriteUserData, parse_date
 
 """
 Token are stored in the tokens.json file.
@@ -86,7 +88,7 @@
     return False
 
 
-def get_token_name(token):
+def get_token_name(token: str) -> typing.Optional[str]:
     """Return the name of the token provided"""
     with ReadUserData(UserDataFiles.TOKENS) as tokens:
         for t in tokens["tokens"]:
@@ -95,11 +97,22 @@
     return None
 
 
+class BasicTokenInfo(BaseModel):
+    """Token info"""
+
+    name: str
+    date: datetime
+
+
 def get_tokens_info():
     """Get all tokens info without tokens themselves"""
     with ReadUserData(UserDataFiles.TOKENS) as tokens:
         return [
-            {"name": token["name"], "date": token["date"]} for token in tokens["tokens"]
+            BasicTokenInfo(
+                name=t["name"],
+                date=parse_date(t["date"]),
+            )
+            for t in tokens["tokens"]
         ]
 
 
@@ -120,7 +133,7 @@
         {
             "token": token,
             "name": name,
-            "date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")),
+            "date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")),
         }
     )
     return token
@@ -132,7 +145,7 @@
     tokens["tokens"] = [t for t in tokens["tokens"] if t["name"] != token_name]
 
 
-def refresh_token(token):
+def refresh_token(token: str) -> typing.Optional[str]:
     """Change the token field of the existing token"""
     new_token = _generate_token()
     with WriteUserData(UserDataFiles.TOKENS) as tokens:
@@ -160,9 +173,7 @@
         return False
     if "expiration" not in recovery_token or recovery_token["expiration"] is None:
         return True
-    return datetime.now() < datetime.strptime(
-        recovery_token["expiration"], "%Y-%m-%dT%H:%M:%S.%fZ"
-    )
+    return datetime.now() < parse_date(recovery_token["expiration"])
 
 
 def get_recovery_token_status():
@@ -190,7 +201,9 @@
     return tokens["recovery_token"]["token"]
 
 
-def generate_recovery_token(expiration=None, uses_left=None):
+def generate_recovery_token(
+    expiration: typing.Optional[datetime], uses_left: typing.Optional[int]
+) -> str:
     """Generate a 24 bytes recovery token and return a mnemonic word list.
     Write a string representation of the recovery token to the tokens.json file.
     """
@@ -210,8 +223,8 @@
     with WriteUserData(UserDataFiles.TOKENS) as tokens:
         tokens["recovery_token"] = {
             "token": recovery_token_str,
-            "date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")),
-            "expiration": expiration.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+            "date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")),
+            "expiration": expiration.strftime("%Y-%m-%dT%H:%M:%S.%f")
             if expiration is not None
             else None,
             "uses_left": uses_left if uses_left is not None else None,
@@ -258,7 +271,7 @@
     return token
 
 
-def get_new_device_auth_token():
+def get_new_device_auth_token() -> str:
     """Generate a new device auth token which is valid for 10 minutes
     and return a mnemonic phrase representation
     Write token to the new_device of the tokens.json file.
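Because timestamps in existing tokens.json files occur in several historic shapes, every date read above now goes through the parse_date helper added in selfprivacy_api/utils. A quick illustration of the four accepted formats (illustrative usage only):

from selfprivacy_api.utils import parse_date

# All four historic shapes parse to the same naive datetime; a trailing
# "Z" is accepted but not interpreted as a timezone.
for raw in (
    "2022-07-05T11:12:13.123456Z",
    "2022-07-05T11:12:13.123456",
    "2022-07-05 11:12:13.123456Z",
    "2022-07-05 11:12:13.123456",
):
    assert parse_date(raw).isoformat() == "2022-07-05T11:12:13.123456"

# Anything else raises ValueError("Invalid date string").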
@@ -282,14 +295,7 @@ def _get_new_device_auth_token(): new_device = tokens["new_device"] if "expiration" not in new_device: return None - if new_device["expiration"].endswith("Z"): - expiration = datetime.strptime( - new_device["expiration"], "%Y-%m-%dT%H:%M:%S.%fZ" - ) - else: - expiration = datetime.strptime( - new_device["expiration"], "%Y-%m-%d %H:%M:%S.%f" - ) + expiration = parse_date(new_device["expiration"]) if datetime.now() > expiration: return None return new_device["token"] diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py new file mode 100644 index 0000000..9d96d52 --- /dev/null +++ b/selfprivacy_api/utils/block_devices.py @@ -0,0 +1,226 @@ +"""Wrapper for block device functions.""" +import subprocess +import json +import typing + +from selfprivacy_api.utils import WriteUserData + + +def get_block_device(device_name): + """ + Return a block device by name. + """ + lsblk_output = subprocess.check_output( + [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + f"/dev/{device_name}", + ] + ) + lsblk_output = lsblk_output.decode("utf-8") + lsblk_output = json.loads(lsblk_output) + return lsblk_output["blockdevices"][0] + + +def resize_block_device(block_device) -> bool: + """ + Resize a block device. Return True if successful. + """ + resize_command = ["resize2fs", block_device] + try: + subprocess.check_output(resize_command, shell=False) + except subprocess.CalledProcessError: + return False + return True + + +class BlockDevice: + """ + A block device. + """ + + def __init__(self, block_device): + self.name = block_device["name"] + self.path = block_device["path"] + self.fsavail = str(block_device["fsavail"]) + self.fssize = str(block_device["fssize"]) + self.fstype = block_device["fstype"] + self.fsused = str(block_device["fsused"]) + self.mountpoints = block_device["mountpoints"] + self.label = block_device["label"] + self.uuid = block_device["uuid"] + self.size = str(block_device["size"]) + self.model = block_device["model"] + self.serial = block_device["serial"] + self.type = block_device["type"] + self.locked = False + + def __str__(self): + return self.name + + def __repr__(self): + return f"" + + def __eq__(self, other): + return self.name == other.name + + def __hash__(self): + return hash(self.name) + + def stats(self) -> typing.Dict[str, typing.Any]: + """ + Update current data and return a dictionary of stats. + """ + device = get_block_device(self.name) + self.fsavail = str(device["fsavail"]) + self.fssize = str(device["fssize"]) + self.fstype = device["fstype"] + self.fsused = str(device["fsused"]) + self.mountpoints = device["mountpoints"] + self.label = device["label"] + self.uuid = device["uuid"] + self.size = str(device["size"]) + self.model = device["model"] + self.serial = device["serial"] + self.type = device["type"] + + return { + "name": self.name, + "path": self.path, + "fsavail": self.fsavail, + "fssize": self.fssize, + "fstype": self.fstype, + "fsused": self.fsused, + "mountpoints": self.mountpoints, + "label": self.label, + "uuid": self.uuid, + "size": self.size, + "model": self.model, + "serial": self.serial, + "type": self.type, + } + + def resize(self): + """ + Resize the block device. + """ + if not self.locked: + self.locked = True + resize_block_device(self.path) + self.locked = False + + def mount(self) -> bool: + """ + Mount the block device. 
+ """ + with WriteUserData() as user_data: + if "volumes" not in user_data: + user_data["volumes"] = [] + # Check if the volume is already mounted + for volume in user_data["volumes"]: + if volume["device"] == self.path: + return False + user_data["volumes"].append( + { + "device": self.path, + "mountPoint": f"/volumes/{self.name}", + "fsType": self.fstype, + } + ) + return True + + def unmount(self) -> bool: + """ + Unmount the block device. + """ + with WriteUserData() as user_data: + if "volumes" not in user_data: + user_data["volumes"] = [] + # Check if the volume is already mounted + for volume in user_data["volumes"]: + if volume["device"] == self.path: + user_data["volumes"].remove(volume) + return True + return False + + +class BlockDevices: + """Singleton holding all Block devices""" + + _instance = None + + def __new__(cls, *args, **kwargs): + if not cls._instance: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__(self): + self.block_devices = [] + self.update() + + def update(self) -> None: + """ + Update the list of block devices. + """ + devices = [] + lsblk_output = subprocess.check_output( + [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + ] + ) + lsblk_output = lsblk_output.decode("utf-8") + lsblk_output = json.loads(lsblk_output) + for device in lsblk_output["blockdevices"]: + # Ignore devices with type "rom" + if device["type"] == "rom": + continue + if device["fstype"] is None: + if "children" in device: + for child in device["children"]: + if child["fstype"] == "ext4": + device = child + break + devices.append(device) + # Add new devices and delete non-existent devices + for device in devices: + if device["name"] not in [ + block_device.name for block_device in self.block_devices + ]: + self.block_devices.append(BlockDevice(device)) + for block_device in self.block_devices: + if block_device.name not in [device["name"] for device in devices]: + self.block_devices.remove(block_device) + + def get_block_device(self, name: str) -> typing.Optional[BlockDevice]: + """ + Return a block device by name. + """ + for block_device in self.block_devices: + if block_device.name == name: + return block_device + return None + + def get_block_devices(self) -> typing.List[BlockDevice]: + """ + Return a list of block devices. + """ + return self.block_devices + + def get_block_devices_by_mountpoint( + self, mountpoint: str + ) -> typing.List[BlockDevice]: + """ + Return a list of block devices with a given mountpoint. + """ + block_devices = [] + for block_device in self.block_devices: + if mountpoint in block_device.mountpoints: + block_devices.append(block_device) + return block_devices diff --git a/selfprivacy_api/utils/huey.py b/selfprivacy_api/utils/huey.py new file mode 100644 index 0000000..034f7ba --- /dev/null +++ b/selfprivacy_api/utils/huey.py @@ -0,0 +1,14 @@ +"""MiniHuey singleton.""" +import os +from huey import SqliteHuey + +HUEY_DATABASE = "/etc/nixos/userdata/tasks.db" + +# Singleton instance containing the huey database. 
+ +test_mode = os.environ.get("TEST_MODE") + +huey = SqliteHuey( + HUEY_DATABASE, + immediate=test_mode == "true", +) diff --git a/selfprivacy_api/utils/network.py b/selfprivacy_api/utils/network.py new file mode 100644 index 0000000..c1b8a2b --- /dev/null +++ b/selfprivacy_api/utils/network.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 +"""Network utils""" +import subprocess +import re +from typing import Optional + + +def get_ip4() -> str: + """Get IPv4 address""" + try: + ip4 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode( + "utf-8" + ) + ip4 = re.search(r"inet (\d+\.\d+\.\d+\.\d+)\/\d+", ip4) + except subprocess.CalledProcessError: + ip4 = None + return ip4.group(1) if ip4 else "" + + +def get_ip6() -> str: + """Get IPv6 address""" + try: + ip6 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode( + "utf-8" + ) + ip6 = re.search(r"inet6 (\S+)\/\d+", ip6) + except subprocess.CalledProcessError: + ip6 = None + return ip6.group(1) if ip6 else "" diff --git a/setup.py b/setup.py index 5619621..eabc165 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="1.2.7", + version="2.0.0", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", diff --git a/shell.nix b/shell.nix index 79f3623..0ccb99d 100644 --- a/shell.nix +++ b/shell.nix @@ -1,21 +1,62 @@ -{ pkgs ? import {} }: +{ pkgs ? import { } }: let sp-python = pkgs.python39.withPackages (p: with p; [ - flask - flask-restful - setuptools - portalocker - flask-swagger - flask-swagger-ui - pytz - pytest - pytest-mock - pytest-datadir - huey + setuptools + portalocker + pytz + pytest + pytest-mock + pytest-datadir + huey gevent mnemonic coverage pylint + pydantic + typing-extensions + psutil + black + fastapi + uvicorn + (buildPythonPackage rec { + pname = "strawberry-graphql"; + version = "0.123.0"; + format = "pyproject"; + patches = [ + ./strawberry-graphql.patch + ]; + propagatedBuildInputs = [ + typing-extensions + python-multipart + python-dateutil + # flask + pydantic + pygments + poetry + # flask-cors + (buildPythonPackage rec { + pname = "graphql-core"; + version = "3.2.0"; + format = "setuptools"; + src = fetchPypi { + inherit pname version; + sha256 = "sha256-huKgvgCL/eGe94OI3opyWh2UKpGQykMcJKYIN5c4A84="; + }; + checkInputs = [ + pytest-asyncio + pytest-benchmark + pytestCheckHook + ]; + pythonImportsCheck = [ + "graphql" + ]; + }) + ]; + src = fetchPypi { + inherit pname version; + sha256 = "KsmZ5Xv8tUg6yBxieAEtvoKoRG60VS+iVGV0X6oCExo="; + }; + }) ]); in pkgs.mkShell { @@ -27,4 +68,4 @@ pkgs.mkShell { PYTHONPATH=${sp-python}/${sp-python.sitePackages} # maybe set more env-vars ''; -} \ No newline at end of file +} diff --git a/strawberry-graphql.patch b/strawberry-graphql.patch new file mode 100644 index 0000000..a731522 --- /dev/null +++ b/strawberry-graphql.patch @@ -0,0 +1,96 @@ +diff --git a/pyproject.toml b/pyproject.toml +index 0cbf2ef..7736e92 100644 +--- a/pyproject.toml ++++ b/pyproject.toml +@@ -51,7 +51,6 @@ python-multipart = "^0.0.5" + sanic = {version = ">=20.12.2,<22.0.0", optional = true} + aiohttp = {version = "^3.7.4.post0", optional = true} + fastapi = {version = ">=0.65.2", optional = true} +-"backports.cached-property" = "^1.0.1" + + [tool.poetry.dev-dependencies] + pytest = "^7.1" +diff --git a/strawberry/directive.py b/strawberry/directive.py +index 491e390..26ba345 100644 +--- a/strawberry/directive.py ++++ b/strawberry/directive.py +@@ -1,10 +1,10 @@ + from __future__ import 
annotations + + import dataclasses ++from functools import cached_property + import inspect + from typing import Any, Callable, List, Optional, TypeVar + +-from backports.cached_property import cached_property + from typing_extensions import Annotated + + from graphql import DirectiveLocation +diff --git a/strawberry/extensions/tracing/datadog.py b/strawberry/extensions/tracing/datadog.py +index 01fba20..7c06950 100644 +--- a/strawberry/extensions/tracing/datadog.py ++++ b/strawberry/extensions/tracing/datadog.py +@@ -1,8 +1,8 @@ + import hashlib ++from functools import cached_property + from inspect import isawaitable + from typing import Optional + +-from backports.cached_property import cached_property + from ddtrace import tracer + + from strawberry.extensions import Extension +diff --git a/strawberry/field.py b/strawberry/field.py +index 80ed12a..f1bf2e9 100644 +--- a/strawberry/field.py ++++ b/strawberry/field.py +@@ -1,5 +1,6 @@ + import builtins + import dataclasses ++from functools import cached_property + import inspect + import sys + from typing import ( +@@ -18,7 +19,6 @@ from typing import ( + overload, + ) + +-from backports.cached_property import cached_property + from typing_extensions import Literal + + from strawberry.annotation import StrawberryAnnotation +diff --git a/strawberry/types/fields/resolver.py b/strawberry/types/fields/resolver.py +index c5b3edd..f4112ce 100644 +--- a/strawberry/types/fields/resolver.py ++++ b/strawberry/types/fields/resolver.py +@@ -1,6 +1,7 @@ + from __future__ import annotations as _ + + import builtins ++from functools import cached_property + import inspect + import sys + import warnings +@@ -22,7 +23,6 @@ from typing import ( # type: ignore[attr-defined] + _eval_type, + ) + +-from backports.cached_property import cached_property + from typing_extensions import Annotated, Protocol, get_args, get_origin + + from strawberry.annotation import StrawberryAnnotation +diff --git a/strawberry/types/info.py b/strawberry/types/info.py +index a172c04..475a3ee 100644 +--- a/strawberry/types/info.py ++++ b/strawberry/types/info.py +@@ -1,9 +1,8 @@ + import dataclasses ++from functools import cached_property + import warnings + from typing import TYPE_CHECKING, Any, Dict, Generic, List, Optional, TypeVar, Union + +-from backports.cached_property import cached_property +- + from graphql import GraphQLResolveInfo, OperationDefinitionNode + from graphql.language import FieldNode + from graphql.pyutils.path import Path diff --git a/tests/common.py b/tests/common.py new file mode 100644 index 0000000..18e065c --- /dev/null +++ b/tests/common.py @@ -0,0 +1,28 @@ +import json +from mnemonic import Mnemonic + + +def read_json(file_path): + with open(file_path, "r", encoding="utf-8") as file: + return json.load(file) + + +def write_json(file_path, data): + with open(file_path, "w", encoding="utf-8") as file: + json.dump(data, file, indent=4) + + +def generate_api_query(query_array): + return "query TestApi {\n api {" + "\n".join(query_array) + "}\n}" + + +def generate_system_query(query_array): + return "query TestSystem {\n system {" + "\n".join(query_array) + "}\n}" + + +def generate_users_query(query_array): + return "query TestUsers {\n users {" + "\n".join(query_array) + "}\n}" + + +def mnemonic_to_hex(mnemonic): + return Mnemonic(language="english").to_entropy(mnemonic).hex() diff --git a/tests/conftest.py b/tests/conftest.py index 7a6fdea..ea7a66a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,10 +1,18 @@ +"""Tests configuration.""" +# 
pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+import os
 import pytest
-from flask import testing
-from selfprivacy_api.app import create_app
+from fastapi.testclient import TestClient
+
+
+def pytest_generate_tests(metafunc):
+    os.environ["TEST_MODE"] = "true"
 
 
 @pytest.fixture
 def tokens_file(mocker, shared_datadir):
+    """Mock tokens file."""
     mock = mocker.patch(
         "selfprivacy_api.utils.TOKENS_FILE", shared_datadir / "tokens.json"
     )
@@ -12,57 +20,43 @@
 
 
 @pytest.fixture
-def app():
-    app = create_app(
-        {
-            "ENABLE_SWAGGER": "1",
-        }
+def jobs_file(mocker, shared_datadir):
+    """Mock jobs file."""
+    mock = mocker.patch("selfprivacy_api.utils.JOBS_FILE", shared_datadir / "jobs.json")
+    return mock
+
+
+@pytest.fixture
+def huey_database(mocker, shared_datadir):
+    """Mock huey database."""
+    mock = mocker.patch(
+        "selfprivacy_api.utils.huey.HUEY_DATABASE", shared_datadir / "huey.db"
     )
-
-    yield app
+    return mock
 
 
 @pytest.fixture
-def client(app, tokens_file):
-    return app.test_client()
+def client(tokens_file, huey_database, jobs_file):
+    from selfprivacy_api.app import app
 
-
-class AuthorizedClient(testing.FlaskClient):
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.token = "TEST_TOKEN"
-
-    def open(self, *args, **kwargs):
-        if "headers" not in kwargs:
-            kwargs["headers"] = {}
-        kwargs["headers"]["Authorization"] = f"Bearer {self.token}"
-        return super().open(*args, **kwargs)
-
-
-class WrongAuthClient(testing.FlaskClient):
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.token = "WRONG_TOKEN"
-
-    def open(self, *args, **kwargs):
-        if "headers" not in kwargs:
-            kwargs["headers"] = {}
-        kwargs["headers"]["Authorization"] = f"Bearer {self.token}"
-        return super().open(*args, **kwargs)
+    return TestClient(app)
 
 
 @pytest.fixture
-def authorized_client(app, tokens_file):
-    app.test_client_class = AuthorizedClient
-    return app.test_client()
+def authorized_client(tokens_file, huey_database, jobs_file):
+    """Authorized test client fixture."""
+    from selfprivacy_api.app import app
+
+    client = TestClient(app)
+    client.headers.update({"Authorization": "Bearer TEST_TOKEN"})
+    return client
 
 
 @pytest.fixture
-def wrong_auth_client(app, tokens_file):
-    app.test_client_class = WrongAuthClient
-    return app.test_client()
+def wrong_auth_client(tokens_file, huey_database, jobs_file):
+    """Wrong token test client fixture."""
+    from selfprivacy_api.app import app
 
-
-@pytest.fixture
-def runner(app, tokens_file):
-    return app.test_cli_runner()
+    client = TestClient(app)
+    client.headers.update({"Authorization": "Bearer WRONG_TOKEN"})
+    return client
diff --git a/tests/data/jobs.json b/tests/data/jobs.json
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/tests/data/jobs.json
@@ -0,0 +1 @@
+{}
diff --git a/tests/test_block_device_utils.py b/tests/test_block_device_utils.py
new file mode 100644
index 0000000..2676e6c
--- /dev/null
+++ b/tests/test_block_device_utils.py
@@ -0,0 +1,490 @@
+#!/usr/bin/env python3
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=missing-function-docstring
+import json
+import subprocess
+import pytest
+
+from selfprivacy_api.utils.block_devices import (
+    BlockDevice,
+    BlockDevices,
+    get_block_device,
+    resize_block_device,
+)
+from tests.common import read_json
+
+SINGLE_LSBLK_OUTPUT = b"""
+{
+    "blockdevices": [
+        {
+            "name": "sda1",
+            "path": "/dev/sda1",
+            "fsavail": "4614107136",
+            
"fssize": "19814920192", + "fstype": "ext4", + "fsused": "14345314304", + "mountpoints": [ + "/nix/store", "/" + ], + "label": null, + "uuid": "ec80c004-baec-4a2c-851d-0e1807135511", + "size": 20210236928, + "model": null, + "serial": null, + "type": "part" + } + ] +} +""" + + +@pytest.fixture +def lsblk_singular_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=SINGLE_LSBLK_OUTPUT + ) + return mock + + +@pytest.fixture +def failed_check_output_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", + autospec=True, + side_effect=subprocess.CalledProcessError( + returncode=1, cmd=["some", "command"] + ), + ) + return mock + + +@pytest.fixture +def only_root_in_userdata(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "only_root.json") + assert read_json(datadir / "only_root.json")["volumes"][0]["device"] == "/dev/sda1" + assert ( + read_json(datadir / "only_root.json")["volumes"][0]["mountPoint"] + == "/volumes/sda1" + ) + assert read_json(datadir / "only_root.json")["volumes"][0]["filesystem"] == "ext4" + return datadir + + +@pytest.fixture +def no_devices_in_userdata(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_devices.json") + assert read_json(datadir / "no_devices.json")["volumes"] == [] + return datadir + + +@pytest.fixture +def undefined_devices_in_userdata(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") + assert "volumes" not in read_json(datadir / "undefined.json") + return datadir + + +def test_create_block_device_object(lsblk_singular_mock, authorized_client): + output = get_block_device("sda1") + assert lsblk_singular_mock.call_count == 1 + assert lsblk_singular_mock.call_args[0][0] == [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "/dev/sda1", + ] + assert output == json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0] + + +def test_resize_block_device(lsblk_singular_mock, authorized_client): + result = resize_block_device("sdb") + assert result is True + assert lsblk_singular_mock.call_count == 1 + assert lsblk_singular_mock.call_args[0][0] == [ + "resize2fs", + "sdb", + ] + + +def test_resize_block_device_failed(failed_check_output_mock, authorized_client): + result = resize_block_device("sdb") + assert result is False + assert failed_check_output_mock.call_count == 1 + assert failed_check_output_mock.call_args[0][0] == [ + "resize2fs", + "sdb", + ] + + +VOLUME_LSBLK_OUTPUT = b""" +{ + "blockdevices": [ + { + "name": "sdb", + "path": "/dev/sdb", + "fsavail": "11888545792", + "fssize": "12573614080", + "fstype": "ext4", + "fsused": "24047616", + "mountpoints": [ + "/volumes/sdb" + ], + "label": null, + "uuid": "fa9d0026-ee23-4047-b8b1-297ae16fa751", + "size": 12884901888, + "model": "Volume", + "serial": "21378102", + "type": "disk" + } + ] +} +""" + + +def test_create_block_device(lsblk_singular_mock, authorized_client): + block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + + assert block_device.name == "sdb" + assert block_device.path == "/dev/sdb" + assert block_device.fsavail == "11888545792" + assert block_device.fssize == "12573614080" + assert block_device.fstype == "ext4" + assert block_device.fsused == "24047616" + assert block_device.mountpoints == ["/volumes/sdb"] + assert block_device.label is None + assert block_device.uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751" + 
assert block_device.size == "12884901888" + assert block_device.model == "Volume" + assert block_device.serial == "21378102" + assert block_device.type == "disk" + assert block_device.locked is False + assert str(block_device) == "sdb" + assert ( + repr(block_device) + == "" + ) + assert hash(block_device) == hash("sdb") + + +def test_block_devices_equal(lsblk_singular_mock, authorized_client): + block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + block_device2 = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + + assert block_device == block_device2 + + +@pytest.fixture +def resize_block_mock(mocker): + mock = mocker.patch( + "selfprivacy_api.utils.block_devices.resize_block_device", + autospec=True, + return_value=True, + ) + return mock + + +def test_call_resize_from_block_device( + lsblk_singular_mock, resize_block_mock, authorized_client +): + block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + block_device.resize() + assert resize_block_mock.call_count == 1 + assert resize_block_mock.call_args[0][0] == "/dev/sdb" + assert lsblk_singular_mock.call_count == 0 + + +def test_get_stats_from_block_device(lsblk_singular_mock, authorized_client): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + stats = block_device.stats() + assert stats == { + "name": "sda1", + "path": "/dev/sda1", + "fsavail": "4614107136", + "fssize": "19814920192", + "fstype": "ext4", + "fsused": "14345314304", + "mountpoints": ["/nix/store", "/"], + "label": None, + "uuid": "ec80c004-baec-4a2c-851d-0e1807135511", + "size": "20210236928", + "model": None, + "serial": None, + "type": "part", + } + assert lsblk_singular_mock.call_count == 1 + assert lsblk_singular_mock.call_args[0][0] == [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + "/dev/sda1", + ] + + +def test_mount_block_device( + lsblk_singular_mock, only_root_in_userdata, authorized_client +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.mount() + assert result is False + volume = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + result = volume.mount() + assert result is True + assert ( + read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["device"] + == "/dev/sdb" + ) + assert ( + read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["mountPoint"] + == "/volumes/sdb" + ) + assert ( + read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["fsType"] + == "ext4" + ) + + +def test_mount_block_device_when_undefined( + lsblk_singular_mock, undefined_devices_in_userdata, authorized_client +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.mount() + assert result is True + assert ( + read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][ + "device" + ] + == "/dev/sda1" + ) + assert ( + read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][ + "mountPoint" + ] + == "/volumes/sda1" + ) + assert ( + read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][ + "fsType" + ] + == "ext4" + ) + + +def test_unmount_block_device( + lsblk_singular_mock, only_root_in_userdata, authorized_client +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.unmount() + assert result is True + volume = 
BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0]) + result = volume.unmount() + assert result is False + assert len(read_json(only_root_in_userdata / "only_root.json")["volumes"]) == 0 + + +def test_unmount_block_device_when_undefined( + lsblk_singular_mock, undefined_devices_in_userdata, authorized_client +): + block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]) + result = block_device.unmount() + assert result is False + assert ( + len(read_json(undefined_devices_in_userdata / "undefined.json")["volumes"]) == 0 + ) + + +FULL_LSBLK_OUTPUT = b""" +{ + "blockdevices": [ + { + "name": "sda", + "path": "/dev/sda", + "fsavail": null, + "fssize": null, + "fstype": null, + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": null, + "size": 20480786432, + "model": "QEMU HARDDISK", + "serial": "drive-scsi0-0-0-0", + "type": "disk", + "children": [ + { + "name": "sda1", + "path": "/dev/sda1", + "fsavail": "4605702144", + "fssize": "19814920192", + "fstype": "ext4", + "fsused": "14353719296", + "mountpoints": [ + "/nix/store", "/" + ], + "label": null, + "uuid": "ec80c004-baec-4a2c-851d-0e1807135511", + "size": 20210236928, + "model": null, + "serial": null, + "type": "part" + },{ + "name": "sda14", + "path": "/dev/sda14", + "fsavail": null, + "fssize": null, + "fstype": null, + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": null, + "size": 1048576, + "model": null, + "serial": null, + "type": "part" + },{ + "name": "sda15", + "path": "/dev/sda15", + "fsavail": null, + "fssize": null, + "fstype": "vfat", + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": "6B29-5BA7", + "size": 268435456, + "model": null, + "serial": null, + "type": "part" + } + ] + },{ + "name": "sdb", + "path": "/dev/sdb", + "fsavail": "11888545792", + "fssize": "12573614080", + "fstype": "ext4", + "fsused": "24047616", + "mountpoints": [ + "/volumes/sdb" + ], + "label": null, + "uuid": "fa9d0026-ee23-4047-b8b1-297ae16fa751", + "size": 12884901888, + "model": "Volume", + "serial": "21378102", + "type": "disk" + },{ + "name": "sr0", + "path": "/dev/sr0", + "fsavail": null, + "fssize": null, + "fstype": null, + "fsused": null, + "mountpoints": [ + null + ], + "label": null, + "uuid": null, + "size": 1073741312, + "model": "QEMU DVD-ROM", + "serial": "QM00003", + "type": "rom" + } + ] +} +""" + + +@pytest.fixture +def lsblk_full_mock(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=FULL_LSBLK_OUTPUT + ) + return mock + + +def test_get_block_devices(lsblk_full_mock, authorized_client): + block_devices = BlockDevices().get_block_devices() + assert len(block_devices) == 2 + assert block_devices[0].name == "sda1" + assert block_devices[0].path == "/dev/sda1" + assert block_devices[0].fsavail == "4605702144" + assert block_devices[0].fssize == "19814920192" + assert block_devices[0].fstype == "ext4" + assert block_devices[0].fsused == "14353719296" + assert block_devices[0].mountpoints == ["/nix/store", "/"] + assert block_devices[0].label is None + assert block_devices[0].uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert block_devices[0].size == "20210236928" + assert block_devices[0].model is None + assert block_devices[0].serial is None + assert block_devices[0].type == "part" + assert block_devices[1].name == "sdb" + assert block_devices[1].path == "/dev/sdb" + assert block_devices[1].fsavail == "11888545792" + assert block_devices[1].fssize == "12573614080" + assert 
block_devices[1].fstype == "ext4" + assert block_devices[1].fsused == "24047616" + assert block_devices[1].mountpoints == ["/volumes/sdb"] + assert block_devices[1].label is None + assert block_devices[1].uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751" + assert block_devices[1].size == "12884901888" + assert block_devices[1].model == "Volume" + assert block_devices[1].serial == "21378102" + assert block_devices[1].type == "disk" + + +def test_get_block_device(lsblk_full_mock, authorized_client): + block_device = BlockDevices().get_block_device("sda1") + assert block_device is not None + assert block_device.name == "sda1" + assert block_device.path == "/dev/sda1" + assert block_device.fsavail == "4605702144" + assert block_device.fssize == "19814920192" + assert block_device.fstype == "ext4" + assert block_device.fsused == "14353719296" + assert block_device.mountpoints == ["/nix/store", "/"] + assert block_device.label is None + assert block_device.uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert block_device.size == "20210236928" + assert block_device.model is None + assert block_device.serial is None + assert block_device.type == "part" + + +def test_get_nonexistent_block_device(lsblk_full_mock, authorized_client): + block_device = BlockDevices().get_block_device("sda2") + assert block_device is None + + +def test_get_block_devices_by_mountpoint(lsblk_full_mock, authorized_client): + block_devices = BlockDevices().get_block_devices_by_mountpoint("/nix/store") + assert len(block_devices) == 1 + assert block_devices[0].name == "sda1" + assert block_devices[0].path == "/dev/sda1" + assert block_devices[0].fsavail == "4605702144" + assert block_devices[0].fssize == "19814920192" + assert block_devices[0].fstype == "ext4" + assert block_devices[0].fsused == "14353719296" + assert block_devices[0].mountpoints == ["/nix/store", "/"] + assert block_devices[0].label is None + assert block_devices[0].uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert block_devices[0].size == "20210236928" + assert block_devices[0].model is None + assert block_devices[0].serial is None + assert block_devices[0].type == "part" + + +def test_get_block_devices_by_mountpoint_no_match(lsblk_full_mock, authorized_client): + block_devices = BlockDevices().get_block_devices_by_mountpoint("/foo") + assert len(block_devices) == 0 diff --git a/tests/test_block_device_utils/no_devices.json b/tests/test_block_device_utils/no_devices.json new file mode 100644 index 0000000..97300ca --- /dev/null +++ b/tests/test_block_device_utils/no_devices.json @@ -0,0 +1,54 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "volumes": [ + ] +} diff --git a/tests/test_block_device_utils/only_root.json 
b/tests/test_block_device_utils/only_root.json new file mode 100644 index 0000000..0f8ec0d --- /dev/null +++ b/tests/test_block_device_utils/only_root.json @@ -0,0 +1,59 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "volumes": [ + { + "device": "/dev/sda1", + "mountPoint": "/volumes/sda1", + "filesystem": "ext4" + } + ] +} diff --git a/tests/test_block_device_utils/undefined.json b/tests/test_block_device_utils/undefined.json new file mode 100644 index 0000000..eb660cc --- /dev/null +++ b/tests/test_block_device_utils/undefined.json @@ -0,0 +1,52 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} diff --git a/tests/test_common.py b/tests/test_common.py index db60d84..e5d3f62 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,6 +1,7 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument import json +import os import pytest from selfprivacy_api.utils import WriteUserData, ReadUserData @@ -9,19 +10,13 @@ from selfprivacy_api.utils import WriteUserData, ReadUserData def test_get_api_version(authorized_client): response = authorized_client.get("/api/version") assert response.status_code == 200 - assert "version" in response.get_json() + assert "version" in response.json() def test_get_api_version_unauthorized(client): response = client.get("/api/version") assert response.status_code == 200 - assert "version" in response.get_json() - - -def test_get_swagger_json(authorized_client): - response = authorized_client.get("/api/swagger.json") - assert response.status_code == 200 - assert "swagger" in response.get_json() + assert "version" in response.json() def test_read_invalid_user_data(): @@ -34,3 +29,12 @@ def test_write_invalid_user_data(): with pytest.raises(ValueError): with WriteUserData("invalid") as user_data: pass + + +@pytest.fixture +def test_mode(): + return os.environ.get("TEST_MODE") + + 
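# Besides the TEST_MODE check below, the whole suite leans on shared fixtures
# (client, authorized_client, wrong_auth_client, tokens_file) whose
# definitions live in a conftest.py that this diff does not show. A minimal
# sketch of what they presumably look like after the Flask-to-FastAPI move --
# the fixture bodies here are assumptions for illustration, not the project's
# actual conftest:
import pytest
from fastapi.testclient import TestClient

from selfprivacy_api.app import app


@pytest.fixture
def tokens_file(mocker, shared_datadir):
    """Point the API at a writable copy of tokens.json (pytest-datadir)."""
    # TOKENS_FILE is assumed to be a module-level constant, mirroring how
    # these tests patch selfprivacy_api.utils.USERDATA_FILE.
    mocker.patch(
        "selfprivacy_api.utils.TOKENS_FILE", new=shared_datadir / "tokens.json"
    )
    return shared_datadir / "tokens.json"


@pytest.fixture
def client():
    return TestClient(app)


@pytest.fixture
def authorized_client(client):
    client.headers.update({"Authorization": "Bearer TEST_TOKEN"})
    return client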
+def test_the_test_mode(test_mode): + assert test_mode == "true" diff --git a/tests/test_graphql/__init__.py b/tests/test_graphql/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_graphql/data/tokens.json b/tests/test_graphql/data/tokens.json new file mode 100644 index 0000000..9be9d02 --- /dev/null +++ b/tests/test_graphql/data/tokens.json @@ -0,0 +1,14 @@ +{ + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": "2022-01-14 08:31:10.789314" + }, + { + "token": "TEST_TOKEN2", + "name": "test_token2", + "date": "2022-01-14 08:31:10.789314" + } + ] +} \ No newline at end of file diff --git a/tests/test_graphql/test_api.py b/tests/test_graphql/test_api.py new file mode 100644 index 0000000..16c7c4d --- /dev/null +++ b/tests/test_graphql/test_api.py @@ -0,0 +1,57 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring + +from tests.common import generate_api_query +from tests.test_graphql.test_api_devices import API_DEVICES_QUERY +from tests.test_graphql.test_api_recovery import API_RECOVERY_QUERY +from tests.test_graphql.test_api_version import API_VERSION_QUERY + +TOKENS_FILE_CONTETS = { + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": "2022-01-14 08:31:10.789314", + }, + { + "token": "TEST_TOKEN2", + "name": "test_token2", + "date": "2022-01-14 08:31:10.789314", + }, + ] +} + + +def test_graphql_get_entire_api_data(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": generate_api_query( + [API_VERSION_QUERY, API_DEVICES_QUERY, API_RECOVERY_QUERY] + ) + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert "version" in response.json()["data"]["api"] + assert response.json()["data"]["api"]["devices"] is not None + assert len(response.json()["data"]["api"]["devices"]) == 2 + assert ( + response.json()["data"]["api"]["devices"][0]["creationDate"] + == "2022-01-14T08:31:10.789314" + ) + assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True + assert response.json()["data"]["api"]["devices"][0]["name"] == "test_token" + assert ( + response.json()["data"]["api"]["devices"][1]["creationDate"] + == "2022-01-14T08:31:10.789314" + ) + assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False + assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2" + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py new file mode 100644 index 0000000..d8dc974 --- /dev/null +++ b/tests/test_graphql/test_api_devices.py @@ -0,0 +1,484 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +import datetime +from mnemonic import Mnemonic + +from tests.common import generate_api_query, read_json, write_json + +TOKENS_FILE_CONTETS = { + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": "2022-01-14 08:31:10.789314", + }, + { + "token": "TEST_TOKEN2", + 
"name": "test_token2", + "date": "2022-01-14 08:31:10.789314", + }, + ] +} + +API_DEVICES_QUERY = """ +devices { + creationDate + isCaller + name +} +""" + + +def test_graphql_tokens_info(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={"query": generate_api_query([API_DEVICES_QUERY])}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["devices"] is not None + assert len(response.json()["data"]["api"]["devices"]) == 2 + assert ( + response.json()["data"]["api"]["devices"][0]["creationDate"] + == "2022-01-14T08:31:10.789314" + ) + assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True + assert response.json()["data"]["api"]["devices"][0]["name"] == "test_token" + assert ( + response.json()["data"]["api"]["devices"][1]["creationDate"] + == "2022-01-14T08:31:10.789314" + ) + assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False + assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2" + + +def test_graphql_tokens_info_unauthorized(client, tokens_file): + response = client.post( + "/graphql", + json={"query": generate_api_query([API_DEVICES_QUERY])}, + ) + assert response.status_code == 200 + assert response.json()["data"] is None + + +DELETE_TOKEN_MUTATION = """ +mutation DeleteToken($device: String!) { + deleteDeviceApiToken(device: $device) { + success + message + code + } +} +""" + + +def test_graphql_delete_token_unauthorized(client, tokens_file): + response = client.post( + "/graphql", + json={ + "query": DELETE_TOKEN_MUTATION, + "variables": { + "device": "test_token", + }, + }, + ) + assert response.status_code == 200 + assert response.json()["data"] is None + + +def test_graphql_delete_token(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": DELETE_TOKEN_MUTATION, + "variables": { + "device": "test_token2", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["deleteDeviceApiToken"]["success"] is True + assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 200 + assert read_json(tokens_file) == { + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": "2022-01-14 08:31:10.789314", + } + ] + } + + +def test_graphql_delete_self_token(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": DELETE_TOKEN_MUTATION, + "variables": { + "device": "test_token", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False + assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None + assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 400 + assert read_json(tokens_file) == TOKENS_FILE_CONTETS + + +def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": DELETE_TOKEN_MUTATION, + "variables": { + "device": "test_token3", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False + assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None + assert 
response.json()["data"]["deleteDeviceApiToken"]["code"] == 404 + assert read_json(tokens_file) == TOKENS_FILE_CONTETS + + +REFRESH_TOKEN_MUTATION = """ +mutation RefreshToken { + refreshDeviceApiToken { + success + message + code + token + } +} +""" + + +def test_graphql_refresh_token_unauthorized(client, tokens_file): + response = client.post( + "/graphql", + json={"query": REFRESH_TOKEN_MUTATION}, + ) + assert response.status_code == 200 + assert response.json()["data"] is None + + +def test_graphql_refresh_token(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={"query": REFRESH_TOKEN_MUTATION}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["refreshDeviceApiToken"]["success"] is True + assert response.json()["data"]["refreshDeviceApiToken"]["message"] is not None + assert response.json()["data"]["refreshDeviceApiToken"]["code"] == 200 + assert read_json(tokens_file)["tokens"][0] == { + "token": response.json()["data"]["refreshDeviceApiToken"]["token"], + "name": "test_token", + "date": "2022-01-14 08:31:10.789314", + } + + +NEW_DEVICE_KEY_MUTATION = """ +mutation NewDeviceKey { + getNewDeviceApiKey { + success + message + code + key + } +} +""" + + +def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file): + response = client.post( + "/graphql", + json={"query": NEW_DEVICE_KEY_MUTATION}, + ) + assert response.status_code == 200 + assert response.json()["data"] is None + + +def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={"query": NEW_DEVICE_KEY_MUTATION}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + ) + token = ( + Mnemonic(language="english") + .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) + .hex() + ) + assert read_json(tokens_file)["new_device"]["token"] == token + + +INVALIDATE_NEW_DEVICE_KEY_MUTATION = """ +mutation InvalidateNewDeviceKey { + invalidateNewDeviceApiKey { + success + message + code + } +} +""" + + +def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): + response = client.post( + "/graphql", + json={ + "query": DELETE_TOKEN_MUTATION, + "variables": { + "device": "test_token", + }, + }, + ) + assert response.status_code == 200 + assert response.json()["data"] is None + + +def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={"query": NEW_DEVICE_KEY_MUTATION}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + ) + token = ( + Mnemonic(language="english") + .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) + .hex() + ) + assert read_json(tokens_file)["new_device"]["token"] == token + response = 
authorized_client.post( + "/graphql", + json={"query": INVALIDATE_NEW_DEVICE_KEY_MUTATION}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["invalidateNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["invalidateNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["invalidateNewDeviceApiKey"]["code"] == 200 + assert read_json(tokens_file) == TOKENS_FILE_CONTETS + + +AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """ +mutation AuthorizeWithNewDeviceKey($input: UseNewDeviceKeyInput!) { + authorizeWithNewDeviceApiKey(input: $input) { + success + message + code + token + } +} +""" + + +def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={"query": NEW_DEVICE_KEY_MUTATION}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] + assert mnemonic_key.split(" ").__len__() == 12 + key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() + assert read_json(tokens_file)["new_device"]["token"] == key + response = client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "input": { + "key": mnemonic_key, + "deviceName": "new_device", + } + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] + assert read_json(tokens_file)["tokens"][2]["token"] == token + assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" + + +def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): + response = client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "input": { + "key": "invalid_token", + "deviceName": "test_token", + } + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert read_json(tokens_file) == TOKENS_FILE_CONTETS + + +def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={"query": NEW_DEVICE_KEY_MUTATION}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] + assert mnemonic_key.split(" ").__len__() == 12 + key = 
Mnemonic(language="english").to_entropy(mnemonic_key).hex() + assert read_json(tokens_file)["new_device"]["token"] == key + response = client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "input": { + "key": mnemonic_key, + "deviceName": "new_token", + } + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + assert ( + read_json(tokens_file)["tokens"][2]["token"] + == response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] + ) + assert read_json(tokens_file)["tokens"][2]["name"] == "new_token" + + response = client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "input": { + "key": mnemonic_key, + "deviceName": "test_token2", + } + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert read_json(tokens_file)["tokens"].__len__() == 3 + + +def test_graphql_get_and_authorize_key_after_12_minutes( + client, authorized_client, tokens_file +): + response = authorized_client.post( + "/graphql", + json={"query": NEW_DEVICE_KEY_MUTATION}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True + assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None + assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert ( + response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 + ) + key = ( + Mnemonic(language="english") + .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) + .hex() + ) + assert read_json(tokens_file)["new_device"]["token"] == key + + file_data = read_json(tokens_file) + file_data["new_device"]["expiration"] = str( + datetime.datetime.now() - datetime.timedelta(minutes=13) + ) + write_json(tokens_file, file_data) + + response = client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "input": { + "key": key, + "deviceName": "test_token", + } + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False + assert ( + response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None + ) + assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + + +def test_graphql_authorize_without_token(client, tokens_file): + response = client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "input": { + "deviceName": "test_token", + } + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py new file mode 100644 index 0000000..c5e229e --- /dev/null +++ b/tests/test_graphql/test_api_recovery.py @@ -0,0 +1,551 @@ +# pylint: 
disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +import datetime + +from tests.common import generate_api_query, mnemonic_to_hex, read_json, write_json + +TOKENS_FILE_CONTETS = { + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": "2022-01-14 08:31:10.789314", + }, + { + "token": "TEST_TOKEN2", + "name": "test_token2", + "date": "2022-01-14 08:31:10.789314", + }, + ] +} + +API_RECOVERY_QUERY = """ +recoveryKey { + exists + valid + creationDate + expirationDate + usesLeft +} +""" + + +def test_graphql_recovery_key_status_unauthorized(client, tokens_file): + response = client.post( + "/graphql", + json={"query": generate_api_query([API_RECOVERY_QUERY])}, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + + +def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={"query": generate_api_query([API_RECOVERY_QUERY])}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None + + +API_RECOVERY_KEY_GENERATE_MUTATION = """ +mutation TestGenerateRecoveryKey($limits: RecoveryKeyLimitsInput) { + getNewRecoveryApiKey(limits: $limits) { + success + message + code + key + } +} +""" + +API_RECOVERY_KEY_USE_MUTATION = """ +mutation TestUseRecoveryKey($input: UseRecoveryKeyInput!) 
{ + useRecoveryApiKey(input: $input) { + success + message + code + token + } +} +""" + + +def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None + assert ( + response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() + == 18 + ) + assert read_json(tokens_file)["recovery_token"] is not None + time_generated = read_json(tokens_file)["recovery_token"]["date"] + assert time_generated is not None + key = response.json()["data"]["getNewRecoveryApiKey"]["key"] + assert ( + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") + - datetime.timedelta(seconds=5) + < datetime.datetime.now() + ) + + # Try to get token status + response = authorized_client.post( + "/graphql", + json={"query": generate_api_query([API_RECOVERY_QUERY])}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"][ + "creationDate" + ] == time_generated.replace("Z", "") + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None + + # Try to use token + response = client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "key": key, + "deviceName": "new_test_token", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert ( + response.json()["data"]["useRecoveryApiKey"]["token"] + == read_json(tokens_file)["tokens"][2]["token"] + ) + assert read_json(tokens_file)["tokens"][2]["name"] == "new_test_token" + + # Try to use token again + response = client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "key": key, + "deviceName": "new_test_token2", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert ( + response.json()["data"]["useRecoveryApiKey"]["token"] + == read_json(tokens_file)["tokens"][3]["token"] + ) + assert read_json(tokens_file)["tokens"][3]["name"] == "new_test_token2" + + +def test_graphql_generate_recovery_key_with_expiration_date( + client, authorized_client, tokens_file +): + 
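    # This test walks an expiring recovery key through its full lifecycle:
    # generate it with an expirationDate five minutes in the future, confirm
    # the status query reports it as existing and valid, spend it twice to
    # mint two new device tokens, then rewrite the stored expiration into the
    # past and verify a third use fails with code 404 while the status query
    # flips to valid: False.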
expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5) + expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f") + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "expirationDate": expiration_date_str, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None + assert ( + response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() + == 18 + ) + assert read_json(tokens_file)["recovery_token"] is not None + + key = response.json()["data"]["getNewRecoveryApiKey"]["key"] + assert read_json(tokens_file)["recovery_token"]["expiration"] == expiration_date_str + assert read_json(tokens_file)["recovery_token"]["token"] == mnemonic_to_hex(key) + + time_generated = read_json(tokens_file)["recovery_token"]["date"] + assert time_generated is not None + assert ( + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") + - datetime.timedelta(seconds=5) + < datetime.datetime.now() + ) + + # Try to get token status + response = authorized_client.post( + "/graphql", + json={"query": generate_api_query([API_RECOVERY_QUERY])}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"][ + "creationDate" + ] == time_generated.replace("Z", "") + assert ( + response.json()["data"]["api"]["recoveryKey"]["expirationDate"] + == expiration_date_str + ) + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None + + # Try to use token + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "key": key, + "deviceName": "new_test_token", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert ( + response.json()["data"]["useRecoveryApiKey"]["token"] + == read_json(tokens_file)["tokens"][2]["token"] + ) + + # Try to use token again + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "key": key, + "deviceName": "new_test_token2", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + assert ( + response.json()["data"]["useRecoveryApiKey"]["token"] + == 
read_json(tokens_file)["tokens"][3]["token"] + ) + + # Try to use token after expiration date + new_data = read_json(tokens_file) + new_data["recovery_token"]["expiration"] = ( + datetime.datetime.now() - datetime.timedelta(minutes=5) + ).strftime("%Y-%m-%dT%H:%M:%S.%f") + write_json(tokens_file, new_data) + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "key": key, + "deviceName": "new_test_token3", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is False + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is None + + assert read_json(tokens_file)["tokens"] == new_data["tokens"] + + # Try to get token status + response = authorized_client.post( + "/graphql", + json={"query": generate_api_query([API_RECOVERY_QUERY])}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False + assert ( + response.json()["data"]["api"]["recoveryKey"]["creationDate"] == time_generated + ) + assert ( + response.json()["data"]["api"]["recoveryKey"]["expirationDate"] + == new_data["recovery_token"]["expiration"] + ) + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None + + +def test_graphql_generate_recovery_key_with_expiration_in_the_past( + authorized_client, tokens_file +): + expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5) + expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f") + + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "expirationDate": expiration_date_str, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + assert "recovery_token" not in read_json(tokens_file) + + +def test_graphql_generate_recovery_key_with_invalid_time_format( + authorized_client, tokens_file +): + expiration_date = "invalid_time_format" + expiration_date_str = expiration_date + + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "expirationDate": expiration_date_str, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + + assert "recovery_token" not in read_json(tokens_file) + + +def test_graphql_generate_recovery_key_with_limited_uses( + authorized_client, tokens_file +): + + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "expirationDate": None, + "uses": 2, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert 
response.json()["data"]["getNewRecoveryApiKey"]["success"] is True + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None + + mnemonic_key = response.json()["data"]["getNewRecoveryApiKey"]["key"] + key = mnemonic_to_hex(mnemonic_key) + + assert read_json(tokens_file)["recovery_token"]["token"] == key + assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 + + # Try to get token status + response = authorized_client.post( + "/graphql", + json={"query": generate_api_query([API_RECOVERY_QUERY])}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 2 + + # Try to use token + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "key": mnemonic_key, + "deviceName": "test_token1", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + + # Try to get token status + response = authorized_client.post( + "/graphql", + json={"query": generate_api_query([API_RECOVERY_QUERY])}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 1 + + # Try to use token + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "key": mnemonic_key, + "deviceName": "test_token2", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is True + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + + # Try to get token status + response = authorized_client.post( + "/graphql", + json={"query": generate_api_query([API_RECOVERY_QUERY])}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["api"]["recoveryKey"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True + assert response.json()["data"]["api"]["recoveryKey"]["valid"] is 
False + assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None + assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None + assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 0 + + # Try to use token + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "key": mnemonic_key, + "deviceName": "test_token3", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["useRecoveryApiKey"]["success"] is False + assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 + assert response.json()["data"]["useRecoveryApiKey"]["token"] is None + + +def test_graphql_generate_recovery_key_with_negative_uses( + authorized_client, tokens_file +): + # Try to get token status + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "uses": -1, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + + +def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): + # Try to get token status + response = authorized_client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + "variables": { + "limits": { + "uses": 0, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False + assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None + assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None diff --git a/tests/test_graphql/test_api_version.py b/tests/test_graphql/test_api_version.py new file mode 100644 index 0000000..64bcc36 --- /dev/null +++ b/tests/test_graphql/test_api_version.py @@ -0,0 +1,25 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring + +from tests.common import generate_api_query + +API_VERSION_QUERY = "version" + + +def test_graphql_get_api_version(authorized_client): + response = authorized_client.post( + "/graphql", + json={"query": generate_api_query([API_VERSION_QUERY])}, + ) + assert response.status_code == 200 + assert "version" in response.json()["data"]["api"] + + +def test_graphql_api_version_unauthorized(client): + response = client.post( + "/graphql", + json={"query": generate_api_query([API_VERSION_QUERY])}, + ) + assert response.status_code == 200 + assert "version" in response.json()["data"]["api"] diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py new file mode 100644 index 0000000..4831692 --- /dev/null +++ b/tests/test_graphql/test_ssh.py @@ -0,0 +1,353 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +import pytest + +from tests.common import read_json + + +class ProcessMock: + """Mock subprocess.Popen""" + + def __init__(self, args, **kwargs): + 
self.args = args + self.kwargs = kwargs + + def communicate(): # pylint: disable=no-method-argument + return (b"NEW_HASHED", None) + + returncode = 0 + + +@pytest.fixture +def mock_subprocess_popen(mocker): + mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) + return mock + + +@pytest.fixture +def some_users(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "some_users.json") + assert read_json(datadir / "some_users.json")["users"] == [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": ["ssh-rsa KEY user1@pc"], + }, + {"username": "user2", "hashedPassword": "HASHED_PASSWORD_2", "sshKeys": []}, + {"username": "user3", "hashedPassword": "HASHED_PASSWORD_3"}, + ] + return datadir + + +# TESTS ######################################################## + + +API_CREATE_SSH_KEY_MUTATION = """ +mutation addSshKey($sshInput: SshMutationInput!) { + addSshKey(sshInput: $sshInput) { + success + message + code + user { + username + sshKeys + } + } +} +""" + + +def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user1", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + + +def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user1", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is True + + assert response.json()["data"]["addSshKey"]["user"]["username"] == "user1" + assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ + "ssh-rsa KEY user1@pc", + "ssh-rsa KEY test_key@pc", + ] + + +def test_graphql_add_root_ssh_key(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "root", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is True + + assert response.json()["data"]["addSshKey"]["user"]["username"] == "root" + assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ + "ssh-ed25519 KEY test@pc", + "ssh-rsa KEY test_key@pc", + ] + + +def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "tester", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["addSshKey"]["code"] == 201 + assert response.json()["data"]["addSshKey"]["message"] is not None + 
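    # "tester" is the main account: the userdata fixture presumably stores
    # its keys in the top-level sshKeys list (and root's under ssh.rootKeys),
    # so the expected result below is the pre-existing "ssh-rsa KEY test@pc"
    # plus the key just added.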
assert response.json()["data"]["addSshKey"]["success"] is True + + assert response.json()["data"]["addSshKey"]["user"]["username"] == "tester" + assert response.json()["data"]["addSshKey"]["user"]["sshKeys"] == [ + "ssh-rsa KEY test@pc", + "ssh-rsa KEY test_key@pc", + ] + + +def test_graphql_add_bad_ssh_key(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user1", + "sshKey": "trust me, this is the ssh key", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["addSshKey"]["code"] == 400 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is False + + +def test_graphql_add_ssh_key_nonexistent_user( + authorized_client, some_users, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user666", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["addSshKey"]["code"] == 404 + assert response.json()["data"]["addSshKey"]["message"] is not None + assert response.json()["data"]["addSshKey"]["success"] is False + + +API_REMOVE_SSH_KEY_MUTATION = """ +mutation removeSshKey($sshInput: SshMutationInput!) { + removeSshKey(sshInput: $sshInput) { + success + message + code + user { + username + sshKeys + } + } +} +""" + + +def test_graphql_remove_ssh_key_unauthorized(client, some_users, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user1", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + + +def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user1", + "sshKey": "ssh-rsa KEY user1@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is True + + assert response.json()["data"]["removeSshKey"]["user"]["username"] == "user1" + assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] + + +def test_graphql_remove_root_ssh_key( + authorized_client, some_users, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "root", + "sshKey": "ssh-ed25519 KEY test@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is True + + assert response.json()["data"]["removeSshKey"]["user"]["username"] == "root" + assert 
response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] + + +def test_graphql_remove_main_ssh_key( + authorized_client, some_users, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "tester", + "sshKey": "ssh-rsa KEY test@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["removeSshKey"]["code"] == 200 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is True + + assert response.json()["data"]["removeSshKey"]["user"]["username"] == "tester" + assert response.json()["data"]["removeSshKey"]["user"]["sshKeys"] == [] + + +def test_graphql_remove_nonexistent_ssh_key( + authorized_client, some_users, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user1", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["removeSshKey"]["code"] == 404 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is False + + +def test_graphql_remove_ssh_key_nonexistent_user( + authorized_client, some_users, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": "user666", + "sshKey": "ssh-rsa KEY test_key@pc", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["removeSshKey"]["code"] == 404 + assert response.json()["data"]["removeSshKey"]["message"] is not None + assert response.json()["data"]["removeSshKey"]["success"] is False diff --git a/tests/services/test_ssh/some_users.json b/tests/test_graphql/test_ssh/some_users.json similarity index 100% rename from tests/services/test_ssh/some_users.json rename to tests/test_graphql/test_ssh/some_users.json diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py new file mode 100644 index 0000000..a021a16 --- /dev/null +++ b/tests/test_graphql/test_system.py @@ -0,0 +1,896 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +import os +import pytest + +from tests.common import generate_system_query, read_json + + +@pytest.fixture +def domain_file(mocker, datadir): + mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", datadir / "domain") + return datadir + + +@pytest.fixture +def turned_on(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") + assert read_json(datadir / "turned_on.json")["autoUpgrade"]["enable"] == True + assert read_json(datadir / "turned_on.json")["autoUpgrade"]["allowReboot"] == True + assert read_json(datadir / "turned_on.json")["timezone"] == "Europe/Moscow" + return datadir + + +@pytest.fixture +def turned_off(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") + assert read_json(datadir / "turned_off.json")["autoUpgrade"]["enable"] == False + assert read_json(datadir / "turned_off.json")["autoUpgrade"]["allowReboot"] == 
False + assert read_json(datadir / "turned_off.json")["timezone"] == "Europe/Moscow" + return datadir + + +@pytest.fixture +def undefined_config(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") + assert "autoUpgrade" not in read_json(datadir / "undefined.json") + assert "timezone" not in read_json(datadir / "undefined.json") + return datadir + + +@pytest.fixture +def no_values(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_values.json") + assert "enable" not in read_json(datadir / "no_values.json")["autoUpgrade"] + assert "allowReboot" not in read_json(datadir / "no_values.json")["autoUpgrade"] + return datadir + + +class ProcessMock: + """Mock subprocess.Popen""" + + def __init__(self, args, **kwargs): + self.args = args + self.kwargs = kwargs + + def communicate(): # pylint: disable=no-method-argument + return (b"", None) + + returncode = 0 + + +class BrokenServiceMock(ProcessMock): + """Mock subprocess.Popen for broken service""" + + def communicate(): # pylint: disable=no-method-argument + return (b"Testing error", None) + + returncode = 3 + + +@pytest.fixture +def mock_subprocess_popen(mocker): + mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) + return mock + + +@pytest.fixture +def mock_os_chdir(mocker): + mock = mocker.patch("os.chdir", autospec=True) + return mock + + +@pytest.fixture +def mock_broken_service(mocker): + mock = mocker.patch( + "subprocess.Popen", autospec=True, return_value=BrokenServiceMock + ) + return mock + + +@pytest.fixture +def mock_subprocess_check_output(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=b"Testing Linux" + ) + return mock + + +@pytest.fixture +def mock_get_ip4(mocker): + mock = mocker.patch( + "selfprivacy_api.utils.network.get_ip4", + autospec=True, + return_value="157.90.247.192", + ) + return mock + + +@pytest.fixture +def mock_get_ip6(mocker): + mock = mocker.patch( + "selfprivacy_api.utils.network.get_ip6", + autospec=True, + return_value="fe80::9400:ff:fef1:34ae", + ) + return mock + + +@pytest.fixture +def mock_dkim_key(mocker): + mock = mocker.patch( + "selfprivacy_api.utils.get_dkim_key", + autospec=True, + return_value="I am a DKIM key", + ) + return mock + + +API_PYTHON_VERSION_INFO = """ +info { + pythonVersion +} +""" + + +def test_graphql_get_python_version_wrong_auth( + wrong_auth_client, mock_subprocess_check_output +): + """Test wrong auth""" + response = wrong_auth_client.post( + "/graphql", + json={ + "query": generate_system_query([API_PYTHON_VERSION_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + + +def test_graphql_get_python_version(authorized_client, mock_subprocess_check_output): + """Test get python version""" + response = authorized_client.post( + "/graphql", + json={ + "query": generate_system_query([API_PYTHON_VERSION_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["system"]["info"]["pythonVersion"] == "Testing Linux" + assert mock_subprocess_check_output.call_count == 1 + assert mock_subprocess_check_output.call_args[0][0] == ["python", "-V"] + + +API_SYSTEM_VERSION_INFO = """ +info { + systemVersion +} +""" + + +def test_graphql_get_system_version_unauthorized( + wrong_auth_client, mock_subprocess_check_output +): + """Test wrong auth""" + response = wrong_auth_client.post( + "/graphql", + json={ + 
"query": generate_system_query([API_SYSTEM_VERSION_INFO]), + }, + ) + + assert response.status_code == 200 + assert response.json().get("data") is None + + assert mock_subprocess_check_output.call_count == 0 + + +def test_graphql_get_system_version(authorized_client, mock_subprocess_check_output): + """Test get system version""" + response = authorized_client.post( + "/graphql", + json={ + "query": generate_system_query([API_SYSTEM_VERSION_INFO]), + }, + ) + + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["system"]["info"]["systemVersion"] == "Testing Linux" + assert mock_subprocess_check_output.call_count == 1 + assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"] + + +API_GET_DOMAIN_INFO = """ +domainInfo { + domain + hostname + provider + requiredDnsRecords { + recordType + name + content + ttl + priority + } +} +""" + + +def dns_record( + record_type="A", name="test-domain.tld", content=None, ttl=3600, priority=None +): + if content is None: + if record_type == "A": + content = "157.90.247.192" + elif record_type == "AAAA": + content = "fe80::9400:ff:fef1:34ae" + return { + "recordType": record_type, + "name": name, + "content": content, + "ttl": ttl, + "priority": priority, + } + + +def is_dns_record_in_array(records, dns_record) -> bool: + for record in records: + if ( + record["recordType"] == dns_record["recordType"] + and record["name"] == dns_record["name"] + and record["content"] == dns_record["content"] + and record["ttl"] == dns_record["ttl"] + and record["priority"] == dns_record["priority"] + ): + return True + return False + + +def test_graphql_get_domain( + authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on, mock_dkim_key +): + """Test get domain""" + response = authorized_client.post( + "/graphql", + json={ + "query": generate_system_query([API_GET_DOMAIN_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["system"]["domainInfo"]["domain"] == "test.tld" + assert ( + response.json()["data"]["system"]["domainInfo"]["hostname"] == "test-instance" + ) + assert response.json()["data"]["system"]["domainInfo"]["provider"] == "CLOUDFLARE" + dns_records = response.json()["data"]["system"]["domainInfo"]["requiredDnsRecords"] + assert is_dns_record_in_array(dns_records, dns_record()) + assert is_dns_record_in_array(dns_records, dns_record(record_type="AAAA")) + assert is_dns_record_in_array(dns_records, dns_record(name="api")) + assert is_dns_record_in_array( + dns_records, dns_record(name="api", record_type="AAAA") + ) + assert is_dns_record_in_array(dns_records, dns_record(name="cloud")) + assert is_dns_record_in_array( + dns_records, dns_record(name="cloud", record_type="AAAA") + ) + assert is_dns_record_in_array(dns_records, dns_record(name="git")) + assert is_dns_record_in_array( + dns_records, dns_record(name="git", record_type="AAAA") + ) + assert is_dns_record_in_array(dns_records, dns_record(name="meet")) + assert is_dns_record_in_array( + dns_records, dns_record(name="meet", record_type="AAAA") + ) + assert is_dns_record_in_array(dns_records, dns_record(name="password")) + assert is_dns_record_in_array( + dns_records, dns_record(name="password", record_type="AAAA") + ) + assert is_dns_record_in_array(dns_records, dns_record(name="social")) + assert is_dns_record_in_array( + dns_records, dns_record(name="social", record_type="AAAA") + ) + assert is_dns_record_in_array(dns_records, 
dns_record(name="vpn")) + assert is_dns_record_in_array( + dns_records, dns_record(name="vpn", record_type="AAAA") + ) + assert is_dns_record_in_array( + dns_records, + dns_record( + name="test-domain.tld", + record_type="MX", + content="test-domain.tld", + priority=10, + ), + ) + assert is_dns_record_in_array( + dns_records, + dns_record( + name="_dmarc", record_type="TXT", content="v=DMARC1; p=none", ttl=18000 + ), + ) + assert is_dns_record_in_array( + dns_records, + dns_record( + name="test-domain.tld", + record_type="TXT", + content="v=spf1 a mx ip4:157.90.247.192 -all", + ttl=18000, + ), + ) + assert is_dns_record_in_array( + dns_records, + dns_record( + name="selector._domainkey", + record_type="TXT", + content="I am a DKIM key", + ttl=18000, + ), + ) + + +API_GET_TIMEZONE = """ +settings { + timezone +} +""" + + +def test_graphql_get_timezone_unauthorized(client, turned_on): + """Test get timezone without auth""" + response = client.post( + "/graphql", + json={ + "query": generate_system_query([API_GET_TIMEZONE]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + + +def test_graphql_get_timezone(authorized_client, turned_on): + """Test get timezone""" + response = authorized_client.post( + "/graphql", + json={ + "query": generate_system_query([API_GET_TIMEZONE]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["system"]["settings"]["timezone"] == "Europe/Moscow" + + +def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): + """Test get timezone when none is defined in config""" + response = authorized_client.post( + "/graphql", + json={ + "query": generate_system_query([API_GET_TIMEZONE]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert ( + response.json()["data"]["system"]["settings"]["timezone"] == "Europe/Uzhgorod" + ) + + +API_CHANGE_TIMEZONE_MUTATION = """ +mutation changeTimezone($timezone: String!) 
{ + changeTimezone(timezone: $timezone) { + success + message + code + timezone + } +} +""" + + +def test_graphql_change_timezone_unauthorized(client, turned_on): + """Test change timezone without auth""" + response = client.post( + "/graphql", + json={ + "query": API_CHANGE_TIMEZONE_MUTATION, + "variables": { + "timezone": "Europe/Moscow", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + + +def test_graphql_change_timezone(authorized_client, turned_on): + """Test change timezone""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_TIMEZONE_MUTATION, + "variables": { + "timezone": "Europe/Helsinki", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is True + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 200 + assert response.json()["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Helsinki" + + +def test_graphql_change_timezone_on_undefined(authorized_client, undefined_config): + """Test change timezone when none is defined in config""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_TIMEZONE_MUTATION, + "variables": { + "timezone": "Europe/Helsinki", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is True + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 200 + assert response.json()["data"]["changeTimezone"]["timezone"] == "Europe/Helsinki" + assert ( + read_json(undefined_config / "undefined.json")["timezone"] == "Europe/Helsinki" + ) + + +def test_graphql_change_timezone_without_timezone(authorized_client, turned_on): + """Test change timezone without timezone""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_TIMEZONE_MUTATION, + "variables": { + "timezone": "", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is False + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 400 + assert response.json()["data"]["changeTimezone"]["timezone"] is None + assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" + + +def test_graphql_change_timezone_with_invalid_timezone(authorized_client, turned_on): + """Test change timezone with invalid timezone""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_TIMEZONE_MUTATION, + "variables": { + "timezone": "Invlaid/Timezone", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeTimezone"]["success"] is False + assert response.json()["data"]["changeTimezone"]["message"] is not None + assert response.json()["data"]["changeTimezone"]["code"] == 400 + assert response.json()["data"]["changeTimezone"]["timezone"] is None + assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" + + +API_GET_AUTO_UPGRADE_SETTINGS_QUERY = """ +settings { + autoUpgrade { + enable + allowReboot + } +} +""" + + 
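+# `generate_system_query` comes from tests.common, which is not shown in this
+# diff. Judging only by how every test below reads the response back through
+# data["system"][...], a minimal hypothetical sketch of it could look like:
+#
+#     def generate_system_query(fields: list[str]) -> str:
+#         """Wrap field selections in a top-level `query { system { ... } }`."""
+#         return "query {\n    system {\n" + "\n".join(fields) + "\n    }\n}"
+#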
+def test_graphql_get_auto_upgrade_unauthorized(client, turned_on):
+    """Test get auto upgrade settings without auth"""
+    response = client.post(
+        "/graphql",
+        json={
+            "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]),
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is None
+
+
+def test_graphql_get_auto_upgrade(authorized_client, turned_on):
+    """Test get auto upgrade settings"""
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]),
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is not None
+    assert (
+        response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True
+    )
+    assert (
+        response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"]
+        is True
+    )
+
+
+def test_graphql_get_auto_upgrade_on_undefined(authorized_client, undefined_config):
+    """Test get auto upgrade settings when none is defined in config"""
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]),
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is not None
+    assert (
+        response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True
+    )
+    assert (
+        response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"]
+        is False
+    )
+
+
+def test_graphql_get_auto_upgrade_without_values(authorized_client, no_values):
+    """Test get auto upgrade settings without values"""
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]),
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is not None
+    assert (
+        response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is True
+    )
+    assert (
+        response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"]
+        is False
+    )
+
+
+def test_graphql_get_auto_upgrade_turned_off(authorized_client, turned_off):
+    """Test get auto upgrade settings when turned off"""
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]),
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is not None
+    assert (
+        response.json()["data"]["system"]["settings"]["autoUpgrade"]["enable"] is False
+    )
+    assert (
+        response.json()["data"]["system"]["settings"]["autoUpgrade"]["allowReboot"]
+        is False
+    )
+
+
+API_CHANGE_AUTO_UPGRADE_SETTINGS = """
+mutation changeServerSettings($settings: AutoUpgradeSettingsInput!) {
+    changeAutoUpgradeSettings(settings: $settings) {
+        success
+        message
+        code
+        enableAutoUpgrade
+        allowReboot
+    }
+}
+"""
+
+
+def test_graphql_change_auto_upgrade_unauthorized(client, turned_on):
+    """Test change auto upgrade settings without auth"""
+    response = client.post(
+        "/graphql",
+        json={
+            "query": API_CHANGE_AUTO_UPGRADE_SETTINGS,
+            "variables": {
+                "settings": {
+                    "enableAutoUpgrade": True,
+                    "allowReboot": True,
+                },
+            },
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is None
+
+
+def test_graphql_change_auto_upgrade(authorized_client, turned_on):
+    """Test change auto upgrade settings"""
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": API_CHANGE_AUTO_UPGRADE_SETTINGS,
+            "variables": {
+                "settings": {
+                    "enableAutoUpgrade": False,
+                    "allowReboot": True,
+                },
+            },
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is not None
+    assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True
+    assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None
+    assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200
+    assert (
+        response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"]
+        is False
+    )
+    assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True
+    assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["enable"] is False
+    assert read_json(turned_on / "turned_on.json")["autoUpgrade"]["allowReboot"] is True
+
+
+def test_graphql_change_auto_upgrade_on_undefined(authorized_client, undefined_config):
+    """Test change auto upgrade settings when none is defined in config"""
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": API_CHANGE_AUTO_UPGRADE_SETTINGS,
+            "variables": {
+                "settings": {
+                    "enableAutoUpgrade": False,
+                    "allowReboot": True,
+                },
+            },
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is not None
+    assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True
+    assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None
+    assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200
+    assert (
+        response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"]
+        is False
+    )
+    assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True
+    assert (
+        read_json(undefined_config / "undefined.json")["autoUpgrade"]["enable"] is False
+    )
+    assert (
+        read_json(undefined_config / "undefined.json")["autoUpgrade"]["allowReboot"]
+        is True
+    )
+
+
+def test_graphql_change_auto_upgrade_without_values(authorized_client, no_values):
+    """Test change auto upgrade settings without values"""
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": API_CHANGE_AUTO_UPGRADE_SETTINGS,
+            "variables": {
+                "settings": {
+                    "enableAutoUpgrade": True,
+                    "allowReboot": True,
+                },
+            },
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is not None
+    assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True
+    assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None
+    assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200
+    assert (
+        response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"]
+        is True
+    )
+    assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True
+    assert read_json(no_values / "no_values.json")["autoUpgrade"]["enable"] is True
"no_values.json")["autoUpgrade"]["enable"] is True + assert read_json(no_values / "no_values.json")["autoUpgrade"]["allowReboot"] is True + + +def test_graphql_change_auto_upgrade_turned_off(authorized_client, turned_off): + """Test change auto upgrade settings when turned off""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_AUTO_UPGRADE_SETTINGS, + "variables": { + "settings": { + "enableAutoUpgrade": True, + "allowReboot": True, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is True + ) + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True + assert ( + read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True + ) + + +def test_grphql_change_auto_upgrade_without_enable(authorized_client, turned_off): + """Test change auto upgrade settings without enable""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_AUTO_UPGRADE_SETTINGS, + "variables": { + "settings": { + "allowReboot": True, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False + ) + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is True + assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False + assert ( + read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is True + ) + + +def test_graphql_change_auto_upgrade_without_allow_reboot( + authorized_client, turned_off +): + """Test change auto upgrade settings without allow reboot""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_AUTO_UPGRADE_SETTINGS, + "variables": { + "settings": { + "enableAutoUpgrade": True, + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is True + ) + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False + assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is True + assert ( + read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False + ) + + +def test_graphql_change_auto_upgrade_with_empty_input(authorized_client, turned_off): + """Test change auto upgrade settings with empty input""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_CHANGE_AUTO_UPGRADE_SETTINGS, + "variables": { + "settings": {}, + }, + 
}, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["success"] is True + assert response.json()["data"]["changeAutoUpgradeSettings"]["message"] is not None + assert response.json()["data"]["changeAutoUpgradeSettings"]["code"] == 200 + assert ( + response.json()["data"]["changeAutoUpgradeSettings"]["enableAutoUpgrade"] + is False + ) + assert response.json()["data"]["changeAutoUpgradeSettings"]["allowReboot"] is False + assert read_json(turned_off / "turned_off.json")["autoUpgrade"]["enable"] is False + assert ( + read_json(turned_off / "turned_off.json")["autoUpgrade"]["allowReboot"] is False + ) + + +API_PULL_SYSTEM_CONFIGURATION_MUTATION = """ +mutation testPullSystemConfiguration { + pullRepositoryChanges { + success + message + code + } +} +""" + + +def test_graphql_pull_system_configuration_unauthorized(client, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_PULL_SYSTEM_CONFIGURATION_MUTATION, + }, + ) + + assert response.status_code == 200 + assert response.json().get("data") is None + assert mock_subprocess_popen.call_count == 0 + + +def test_graphql_pull_system_configuration( + authorized_client, mock_subprocess_popen, mock_os_chdir +): + current_dir = os.getcwd() + response = authorized_client.post( + "/graphql", + json={ + "query": API_PULL_SYSTEM_CONFIGURATION_MUTATION, + }, + ) + + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["pullRepositoryChanges"]["success"] is True + assert response.json()["data"]["pullRepositoryChanges"]["message"] is not None + assert response.json()["data"]["pullRepositoryChanges"]["code"] == 200 + + assert mock_subprocess_popen.call_count == 1 + assert mock_subprocess_popen.call_args[0][0] == ["git", "pull"] + assert mock_os_chdir.call_count == 2 + assert mock_os_chdir.call_args_list[0][0][0] == "/etc/nixos" + assert mock_os_chdir.call_args_list[1][0][0] == current_dir + + +def test_graphql_pull_system_broken_repo( + authorized_client, mock_broken_service, mock_os_chdir +): + current_dir = os.getcwd() + + response = authorized_client.post( + "/graphql", + json={ + "query": API_PULL_SYSTEM_CONFIGURATION_MUTATION, + }, + ) + + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["pullRepositoryChanges"]["success"] is False + assert response.json()["data"]["pullRepositoryChanges"]["message"] is not None + assert response.json()["data"]["pullRepositoryChanges"]["code"] == 500 + + assert mock_broken_service.call_count == 1 + assert mock_os_chdir.call_count == 2 + assert mock_os_chdir.call_args_list[0][0][0] == "/etc/nixos" + assert mock_os_chdir.call_args_list[1][0][0] == current_dir diff --git a/tests/test_system/domain b/tests/test_graphql/test_system/domain similarity index 100% rename from tests/test_system/domain rename to tests/test_graphql/test_system/domain diff --git a/tests/test_system/no_values.json b/tests/test_graphql/test_system/no_values.json similarity index 100% rename from tests/test_system/no_values.json rename to tests/test_graphql/test_system/no_values.json diff --git a/tests/test_system/turned_off.json b/tests/test_graphql/test_system/turned_off.json similarity index 100% rename from tests/test_system/turned_off.json rename to tests/test_graphql/test_system/turned_off.json diff --git a/tests/test_graphql/test_system/turned_on.json 
b/tests/test_graphql/test_system/turned_on.json new file mode 100644 index 0000000..821875b --- /dev/null +++ b/tests/test_graphql/test_system/turned_on.json @@ -0,0 +1,55 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": true + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "jitsi": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} diff --git a/tests/test_system/undefined.json b/tests/test_graphql/test_system/undefined.json similarity index 100% rename from tests/test_system/undefined.json rename to tests/test_graphql/test_system/undefined.json diff --git a/tests/test_graphql/test_system_nixos_tasks.py b/tests/test_graphql/test_system_nixos_tasks.py new file mode 100644 index 0000000..3e823b6 --- /dev/null +++ b/tests/test_graphql/test_system_nixos_tasks.py @@ -0,0 +1,231 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring +import pytest + + +@pytest.fixture +def domain_file(mocker, datadir): + mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", datadir / "domain") + return datadir + + +class ProcessMock: + """Mock subprocess.Popen""" + + def __init__(self, args, **kwargs): + self.args = args + self.kwargs = kwargs + + def communicate(): # pylint: disable=no-method-argument + return (b"", None) + + returncode = 0 + + +class BrokenServiceMock(ProcessMock): + """Mock subprocess.Popen for broken service""" + + def communicate(): # pylint: disable=no-method-argument + return (b"Testing error", None) + + returncode = 3 + + +@pytest.fixture +def mock_subprocess_popen(mocker): + mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) + return mock + + +@pytest.fixture +def mock_os_chdir(mocker): + mock = mocker.patch("os.chdir", autospec=True) + return mock + + +@pytest.fixture +def mock_subprocess_check_output(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=b"Testing Linux" + ) + return mock + + +API_REBUILD_SYSTEM_MUTATION = """ +mutation rebuildSystem { + runSystemRebuild { + success + message + code + } +} +""" + + +def test_graphql_system_rebuild_unauthorized(client, mock_subprocess_popen): + """Test system rebuild without authorization""" + response = client.post( + "/graphql", + json={ + "query": API_REBUILD_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + assert mock_subprocess_popen.call_count == 0 + + +def test_graphql_system_rebuild(authorized_client, mock_subprocess_popen): + """Test system rebuild""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_REBUILD_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert 
response.json()["data"]["runSystemRebuild"]["success"] is True + assert response.json()["data"]["runSystemRebuild"]["message"] is not None + assert response.json()["data"]["runSystemRebuild"]["code"] == 200 + assert mock_subprocess_popen.call_count == 1 + assert mock_subprocess_popen.call_args[0][0] == [ + "systemctl", + "start", + "sp-nixos-rebuild.service", + ] + + +API_UPGRADE_SYSTEM_MUTATION = """ +mutation upgradeSystem { + runSystemUpgrade { + success + message + code + } +} +""" + + +def test_graphql_system_upgrade_unauthorized(client, mock_subprocess_popen): + """Test system upgrade without authorization""" + response = client.post( + "/graphql", + json={ + "query": API_UPGRADE_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + assert mock_subprocess_popen.call_count == 0 + + +def test_graphql_system_upgrade(authorized_client, mock_subprocess_popen): + """Test system upgrade""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_UPGRADE_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["runSystemUpgrade"]["success"] is True + assert response.json()["data"]["runSystemUpgrade"]["message"] is not None + assert response.json()["data"]["runSystemUpgrade"]["code"] == 200 + assert mock_subprocess_popen.call_count == 1 + assert mock_subprocess_popen.call_args[0][0] == [ + "systemctl", + "start", + "sp-nixos-upgrade.service", + ] + + +API_ROLLBACK_SYSTEM_MUTATION = """ +mutation rollbackSystem { + runSystemRollback { + success + message + code + } +} +""" + + +def test_graphql_system_rollback_unauthorized(client, mock_subprocess_popen): + """Test system rollback without authorization""" + response = client.post( + "/graphql", + json={ + "query": API_ROLLBACK_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + assert mock_subprocess_popen.call_count == 0 + + +def test_graphql_system_rollback(authorized_client, mock_subprocess_popen): + """Test system rollback""" + response = authorized_client.post( + "/graphql", + json={ + "query": API_ROLLBACK_SYSTEM_MUTATION, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"]["runSystemRollback"]["success"] is True + assert response.json()["data"]["runSystemRollback"]["message"] is not None + assert response.json()["data"]["runSystemRollback"]["code"] == 200 + assert mock_subprocess_popen.call_count == 1 + assert mock_subprocess_popen.call_args[0][0] == [ + "systemctl", + "start", + "sp-nixos-rollback.service", + ] + + +API_REBOOT_SYSTEM_MUTATION = """ +mutation system { + rebootSystem { + success + message + code + } +} +""" + + +def test_graphql_reboot_system_unauthorized(client, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_REBOOT_SYSTEM_MUTATION, + }, + ) + + assert response.status_code == 200 + assert response.json().get("data") is None + + assert mock_subprocess_popen.call_count == 0 + + +def test_graphql_reboot_system(authorized_client, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REBOOT_SYSTEM_MUTATION, + }, + ) + + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["rebootSystem"]["success"] is True + assert response.json()["data"]["rebootSystem"]["message"] is not None + assert 
response.json()["data"]["rebootSystem"]["code"] == 200 + + assert mock_subprocess_popen.call_count == 1 + assert mock_subprocess_popen.call_args[0][0] == ["reboot"] diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py new file mode 100644 index 0000000..c36dcb2 --- /dev/null +++ b/tests/test_graphql/test_users.py @@ -0,0 +1,733 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +import pytest + +from tests.common import ( + generate_users_query, + read_json, +) + +invalid_usernames = [ + "messagebus", + "postfix", + "polkituser", + "dovecot2", + "dovenull", + "nginx", + "postgres", + "systemd-journal-gateway", + "prosody", + "systemd-network", + "systemd-resolve", + "systemd-timesync", + "opendkim", + "rspamd", + "sshd", + "selfprivacy-api", + "restic", + "redis", + "pleroma", + "ocserv", + "nextcloud", + "memcached", + "knot-resolver", + "gitea", + "bitwarden_rs", + "vaultwarden", + "acme", + "virtualMail", + "nixbld1", + "nixbld2", + "nixbld29", + "nobody", +] + + +## FIXTURES ################################################### + + +@pytest.fixture +def no_users(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_users.json") + assert read_json(datadir / "no_users.json")["users"] == [] + return datadir + + +@pytest.fixture +def one_user(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "one_user.json") + assert read_json(datadir / "one_user.json")["users"] == [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": ["ssh-rsa KEY user1@pc"], + } + ] + return datadir + + +@pytest.fixture +def some_users(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "some_users.json") + assert read_json(datadir / "some_users.json")["users"] == [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": ["ssh-rsa KEY user1@pc"], + }, + {"username": "user2", "hashedPassword": "HASHED_PASSWORD_2", "sshKeys": []}, + {"username": "user3", "hashedPassword": "HASHED_PASSWORD_3"}, + ] + return datadir + + +@pytest.fixture +def undefined_settings(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") + assert "users" not in read_json(datadir / "undefined.json") + return datadir + + +class ProcessMock: + """Mock subprocess.Popen""" + + def __init__(self, args, **kwargs): + self.args = args + self.kwargs = kwargs + + def communicate(): # pylint: disable=no-method-argument + return (b"NEW_HASHED", None) + + returncode = 0 + + +@pytest.fixture +def mock_subprocess_popen(mocker): + mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) + return mock + + +## TESTS ###################################################### + +API_USERS_INFO = """ +allUsers { + username + sshKeys +} +""" + + +def test_graphql_get_users_unauthorized(client, some_users, mock_subprocess_popen): + """Test wrong auth""" + response = client.post( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + + +def test_graphql_get_some_users(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + assert len(response.json()["data"]["users"]["allUsers"]) == 4 
+ assert response.json()["data"]["users"]["allUsers"][0]["username"] == "user1" + assert response.json()["data"]["users"]["allUsers"][0]["sshKeys"] == [ + "ssh-rsa KEY user1@pc" + ] + + assert response.json()["data"]["users"]["allUsers"][1]["username"] == "user2" + assert response.json()["data"]["users"]["allUsers"][1]["sshKeys"] == [] + + assert response.json()["data"]["users"]["allUsers"][3]["username"] == "tester" + assert response.json()["data"]["users"]["allUsers"][3]["sshKeys"] == [ + "ssh-rsa KEY test@pc" + ] + + +def test_graphql_get_no_users(authorized_client, no_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert len(response.json()["data"]["users"]["allUsers"]) == 1 + assert response.json()["data"]["users"]["allUsers"][0]["username"] == "tester" + assert response.json()["data"]["users"]["allUsers"][0]["sshKeys"] == [ + "ssh-rsa KEY test@pc" + ] + + +API_GET_USERS = """ +query TestUsers($username: String!) { + users { + getUser(username: $username) { + sshKeys + username + } + } +} +""" + + +def test_graphql_get_one_user_unauthorized(client, one_user, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "user1", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + + +def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen): + + response = authorized_client.post( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "user1", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "user1" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [ + "ssh-rsa KEY user1@pc" + ] + + +def test_graphql_get_some_user(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "user2", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "user2" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [] + + +def test_graphql_get_root_user(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "root", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert response.json()["data"]["users"]["getUser"]["username"] == "root" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [ + "ssh-ed25519 KEY test@pc" + ] + + +def test_graphql_get_main_user(authorized_client, one_user, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "tester", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert len(response.json()["data"]["users"]["getUser"]) == 2 + assert 
response.json()["data"]["users"]["getUser"]["username"] == "tester" + assert response.json()["data"]["users"]["getUser"]["sshKeys"] == [ + "ssh-rsa KEY test@pc" + ] + + +def test_graphql_get_nonexistent_user( + authorized_client, one_user, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "tyler_durden", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["users"]["getUser"] is None + + +API_CREATE_USERS_MUTATION = """ +mutation createUser($user: UserMutationInput!) { + createUser(user: $user) { + success + message + code + user { + username + sshKeys + } + } +} +""" + + +def test_graphql_add_user_unauthorize(client, one_user, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "user2", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + + +def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "user2", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 201 + assert response.json()["data"]["createUser"]["success"] is True + + assert response.json()["data"]["createUser"]["user"]["username"] == "user2" + assert response.json()["data"]["createUser"]["user"]["sshKeys"] == [] + + +def test_graphql_add_undefined_settings( + authorized_client, undefined_settings, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "user2", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 201 + assert response.json()["data"]["createUser"]["success"] is True + + assert response.json()["data"]["createUser"]["user"]["username"] == "user2" + assert response.json()["data"]["createUser"]["user"]["sshKeys"] == [] + + +def test_graphql_add_without_password( + authorized_client, one_user, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "user2", + "password": "", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False + + assert response.json()["data"]["createUser"]["user"] is None + + +def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_USERS_MUTATION, + "variables": { + "user": { + "username": "", + "password": "", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is 
+
+    assert response.json()["data"]["createUser"]["message"] is not None
+    assert response.json()["data"]["createUser"]["code"] == 400
+    assert response.json()["data"]["createUser"]["success"] is False
+
+    assert response.json()["data"]["createUser"]["user"] is None
+
+
+@pytest.mark.parametrize("username", invalid_usernames)
+def test_graphql_add_system_username(
+    authorized_client, one_user, mock_subprocess_popen, username
+):
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": API_CREATE_USERS_MUTATION,
+            "variables": {
+                "user": {
+                    "username": username,
+                    "password": "12345678",
+                },
+            },
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is not None
+
+    assert response.json()["data"]["createUser"]["message"] is not None
+    assert response.json()["data"]["createUser"]["code"] == 409
+    assert response.json()["data"]["createUser"]["success"] is False
+
+    assert response.json()["data"]["createUser"]["user"] is None
+
+
+def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_popen):
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": API_CREATE_USERS_MUTATION,
+            "variables": {
+                "user": {
+                    "username": "user1",
+                    "password": "12345678",
+                },
+            },
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is not None
+
+    assert response.json()["data"]["createUser"]["message"] is not None
+    assert response.json()["data"]["createUser"]["code"] == 409
+    assert response.json()["data"]["createUser"]["success"] is False
+
+    assert response.json()["data"]["createUser"]["user"]["username"] == "user1"
+    assert (
+        response.json()["data"]["createUser"]["user"]["sshKeys"][0]
+        == "ssh-rsa KEY user1@pc"
+    )
+
+
+def test_graphql_add_main_user(authorized_client, one_user, mock_subprocess_popen):
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": API_CREATE_USERS_MUTATION,
+            "variables": {
+                "user": {
+                    "username": "tester",
+                    "password": "12345678",
+                },
+            },
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is not None
+
+    assert response.json()["data"]["createUser"]["message"] is not None
+    assert response.json()["data"]["createUser"]["code"] == 409
+    assert response.json()["data"]["createUser"]["success"] is False
+
+    assert response.json()["data"]["createUser"]["user"]["username"] == "tester"
+    assert (
+        response.json()["data"]["createUser"]["user"]["sshKeys"][0]
+        == "ssh-rsa KEY test@pc"
+    )
+
+
+def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_popen):
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": API_CREATE_USERS_MUTATION,
+            "variables": {
+                "user": {
+                    "username": "a" * 32,
+                    "password": "12345678",
+                },
+            },
+        },
+    )
+    assert response.status_code == 200
+    assert response.json().get("data") is not None
+
+    assert response.json()["data"]["createUser"]["message"] is not None
+    assert response.json()["data"]["createUser"]["code"] == 400
+    assert response.json()["data"]["createUser"]["success"] is False
+
+    assert response.json()["data"]["createUser"]["user"] is None
+
+
+@pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "^-^"])
+def test_graphql_add_invalid_username(
+    authorized_client, one_user, mock_subprocess_popen, username
+):
+    response = authorized_client.post(
+        "/graphql",
+        json={
+            "query": API_CREATE_USERS_MUTATION,
+            "variables": {
+                "user": {
+                    "username": username,
+                    "password": "12345678",
+                },
+            },
+        },
+    )
+    assert
response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["createUser"]["message"] is not None + assert response.json()["data"]["createUser"]["code"] == 400 + assert response.json()["data"]["createUser"]["success"] is False + + assert response.json()["data"]["createUser"]["user"] is None + + +API_DELETE_USER_MUTATION = """ +mutation deleteUser($username: String!) { + deleteUser(username: $username) { + success + message + code + } +} +""" + + +def test_graphql_delete_user_unauthorized(client, some_users, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_DELETE_USER_MUTATION, + "variables": {"username": "user1"}, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + + +def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_DELETE_USER_MUTATION, + "variables": {"username": "user1"}, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["deleteUser"]["code"] == 200 + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is True + + +@pytest.mark.parametrize("username", ["", "def"]) +def test_graphql_delete_nonexistent_users( + authorized_client, some_users, mock_subprocess_popen, username +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_DELETE_USER_MUTATION, + "variables": {"username": username}, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["deleteUser"]["code"] == 404 + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is False + + +@pytest.mark.parametrize("username", invalid_usernames) +def test_graphql_delete_system_users( + authorized_client, some_users, mock_subprocess_popen, username +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_DELETE_USER_MUTATION, + "variables": {"username": username}, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert ( + response.json()["data"]["deleteUser"]["code"] == 404 + or response.json()["data"]["deleteUser"]["code"] == 400 + ) + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is False + + +def test_graphql_delete_main_user(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_DELETE_USER_MUTATION, + "variables": {"username": "tester"}, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["deleteUser"]["code"] == 400 + assert response.json()["data"]["deleteUser"]["message"] is not None + assert response.json()["data"]["deleteUser"]["success"] is False + + +API_UPDATE_USER_MUTATION = """ +mutation updateUser($user: UserMutationInput!) 
{ + updateUser(user: $user) { + success + message + code + user { + username + sshKeys + } + } +} +""" + + +def test_graphql_update_user_unauthorized(client, some_users, mock_subprocess_popen): + response = client.post( + "/graphql", + json={ + "query": API_UPDATE_USER_MUTATION, + "variables": { + "user": { + "username": "user1", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is None + + +def test_graphql_update_user(authorized_client, some_users, mock_subprocess_popen): + response = authorized_client.post( + "/graphql", + json={ + "query": API_UPDATE_USER_MUTATION, + "variables": { + "user": { + "username": "user1", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["updateUser"]["code"] == 200 + assert response.json()["data"]["updateUser"]["message"] is not None + assert response.json()["data"]["updateUser"]["success"] is True + + assert response.json()["data"]["updateUser"]["user"]["username"] == "user1" + assert response.json()["data"]["updateUser"]["user"]["sshKeys"] == [ + "ssh-rsa KEY user1@pc" + ] + assert mock_subprocess_popen.call_count == 1 + + +def test_graphql_update_nonexistent_user( + authorized_client, some_users, mock_subprocess_popen +): + response = authorized_client.post( + "/graphql", + json={ + "query": API_UPDATE_USER_MUTATION, + "variables": { + "user": { + "username": "user666", + "password": "12345678", + }, + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["updateUser"]["code"] == 404 + assert response.json()["data"]["updateUser"]["message"] is not None + assert response.json()["data"]["updateUser"]["success"] is False + + assert response.json()["data"]["updateUser"]["user"] is None + assert mock_subprocess_popen.call_count == 1 diff --git a/tests/test_users/no_users.json b/tests/test_graphql/test_users/no_users.json similarity index 100% rename from tests/test_users/no_users.json rename to tests/test_graphql/test_users/no_users.json diff --git a/tests/test_users/one_user.json b/tests/test_graphql/test_users/one_user.json similarity index 100% rename from tests/test_users/one_user.json rename to tests/test_graphql/test_users/one_user.json diff --git a/tests/test_users/some_users.json b/tests/test_graphql/test_users/some_users.json similarity index 100% rename from tests/test_users/some_users.json rename to tests/test_graphql/test_users/some_users.json diff --git a/tests/test_users/undefined.json b/tests/test_graphql/test_users/undefined.json similarity index 100% rename from tests/test_users/undefined.json rename to tests/test_graphql/test_users/undefined.json diff --git a/tests/test_jobs.py b/tests/test_jobs.py new file mode 100644 index 0000000..87f1386 --- /dev/null +++ b/tests/test_jobs.py @@ -0,0 +1,50 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +import json +import pytest + +from selfprivacy_api.utils import WriteUserData, ReadUserData +from selfprivacy_api.jobs import Jobs, JobStatus + + +def test_jobs(authorized_client, jobs_file, shared_datadir): + jobs = Jobs() + assert jobs.get_jobs() == [] + + test_job = jobs.add( + type_id="test", + name="Test job", + description="This is a test job.", + status=JobStatus.CREATED, + status_text="Status text", + progress=0, + ) + + assert jobs.get_jobs() == [test_job] + + jobs.update( + job=test_job, + 
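+        # advance the job to RUNNING; it should remain the only entry in get_jobs()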
status=JobStatus.RUNNING,
+        status_text="Status text",
+        progress=50,
+    )
+
+    assert jobs.get_jobs() == [test_job]
+
+
+@pytest.fixture
+def mock_subprocess_run(mocker):
+    mock = mocker.patch("subprocess.run", autospec=True)
+    return mock
+
+
+@pytest.fixture
+def mock_shutil_move(mocker):
+    mock = mocker.patch("shutil.move", autospec=True)
+    return mock
+
+
+@pytest.fixture
+def mock_shutil_chown(mocker):
+    mock = mocker.patch("shutil.chown", autospec=True)
+    return mock
diff --git a/tests/test_network_utils.py b/tests/test_network_utils.py
new file mode 100644
index 0000000..0662584
--- /dev/null
+++ b/tests/test_network_utils.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python3
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=missing-function-docstring
+import subprocess
+import pytest
+
+from selfprivacy_api.utils.network import get_ip4, get_ip6
+
+OUTPUT_STRING = b"""
+2: eth0: mtu 1500 qdisc fq_codel state UP group default qlen 1000
+    link/ether 96:00:00:f1:34:ae brd ff:ff:ff:ff:ff:ff
+    altname enp0s3
+    altname ens3
+    inet 157.90.247.192/32 brd 157.90.247.192 scope global dynamic eth0
+       valid_lft 46061sec preferred_lft 35261sec
+    inet6 fe80::9400:ff:fef1:34ae/64 scope link
+       valid_lft forever preferred_lft forever
+"""
+
+FAILED_OUTPUT_STRING = b"""
+Device "eth0" does not exist.
+"""
+
+
+@pytest.fixture
+def ip_process_mock(mocker):
+    mock = mocker.patch(
+        "subprocess.check_output", autospec=True, return_value=OUTPUT_STRING
+    )
+    return mock
+
+
+@pytest.fixture
+def failed_ip_process_mock(mocker):
+    mock = mocker.patch(
+        "subprocess.check_output",
+        autospec=True,
+        return_value=FAILED_OUTPUT_STRING,
+    )
+    return mock
+
+
+@pytest.fixture
+def failed_subprocess_call(mocker):
+    mock = mocker.patch(
+        "subprocess.check_output",
+        autospec=True,
+        side_effect=subprocess.CalledProcessError(
+            returncode=1, cmd=["ip", "addr", "show", "dev", "eth0"]
+        ),
+    )
+    return mock
+
+
+def test_get_ip4(ip_process_mock):
+    """Test get IPv4 address"""
+    ip4 = get_ip4()
+    assert ip4 == "157.90.247.192"
+
+
+def test_get_ip6(ip_process_mock):
+    """Test get IPv6 address"""
+    ip6 = get_ip6()
+    assert ip6 == "fe80::9400:ff:fef1:34ae"
+
+
+def test_failed_get_ip4(failed_ip_process_mock):
+    ip4 = get_ip4()
+    assert ip4 == ""
+
+
+def test_failed_get_ip6(failed_ip_process_mock):
+    ip6 = get_ip6()
+    assert ip6 == ""
+
+
+def test_failed_subprocess_get_ip4(failed_subprocess_call):
+    ip4 = get_ip4()
+    assert ip4 == ""
+
+
+def test_failed_subprocess_get_ip6(failed_subprocess_call):
+    ip6 = get_ip6()
+    assert ip6 == ""
diff --git a/tests/test_rest_endpoints/data/jobs.json b/tests/test_rest_endpoints/data/jobs.json
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/tests/test_rest_endpoints/data/jobs.json
@@ -0,0 +1 @@
+{}
diff --git a/tests/test_rest_endpoints/data/tokens.json b/tests/test_rest_endpoints/data/tokens.json
new file mode 100644
index 0000000..9be9d02
--- /dev/null
+++ b/tests/test_rest_endpoints/data/tokens.json
@@ -0,0 +1,14 @@
+{
+    "tokens": [
+        {
+            "token": "TEST_TOKEN",
+            "name": "test_token",
+            "date": "2022-01-14 08:31:10.789314"
+        },
+        {
+            "token": "TEST_TOKEN2",
+            "name": "test_token2",
+            "date": "2022-01-14 08:31:10.789314"
+        }
+    ]
+}
\ No newline at end of file
diff --git a/tests/services/data/tokens.json b/tests/test_rest_endpoints/services/data/tokens.json
similarity index 100%
rename from tests/services/data/tokens.json
rename to tests/test_rest_endpoints/services/data/tokens.json
diff --git a/tests/services/test_bitwarden.py
b/tests/test_rest_endpoints/services/test_bitwarden.py similarity index 100% rename from tests/services/test_bitwarden.py rename to tests/test_rest_endpoints/services/test_bitwarden.py diff --git a/tests/services/test_bitwarden/enable_undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json similarity index 100% rename from tests/services/test_bitwarden/enable_undefined.json rename to tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json diff --git a/tests/services/test_bitwarden/turned_off.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json similarity index 100% rename from tests/services/test_bitwarden/turned_off.json rename to tests/test_rest_endpoints/services/test_bitwarden/turned_off.json diff --git a/tests/services/test_bitwarden/turned_on.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json similarity index 100% rename from tests/services/test_bitwarden/turned_on.json rename to tests/test_rest_endpoints/services/test_bitwarden/turned_on.json diff --git a/tests/services/test_bitwarden/undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json similarity index 100% rename from tests/services/test_bitwarden/undefined.json rename to tests/test_rest_endpoints/services/test_bitwarden/undefined.json diff --git a/tests/services/test_gitea.py b/tests/test_rest_endpoints/services/test_gitea.py similarity index 100% rename from tests/services/test_gitea.py rename to tests/test_rest_endpoints/services/test_gitea.py diff --git a/tests/services/test_gitea/enable_undefined.json b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json similarity index 100% rename from tests/services/test_gitea/enable_undefined.json rename to tests/test_rest_endpoints/services/test_gitea/enable_undefined.json diff --git a/tests/services/test_gitea/turned_off.json b/tests/test_rest_endpoints/services/test_gitea/turned_off.json similarity index 100% rename from tests/services/test_gitea/turned_off.json rename to tests/test_rest_endpoints/services/test_gitea/turned_off.json diff --git a/tests/services/test_gitea/turned_on.json b/tests/test_rest_endpoints/services/test_gitea/turned_on.json similarity index 100% rename from tests/services/test_gitea/turned_on.json rename to tests/test_rest_endpoints/services/test_gitea/turned_on.json diff --git a/tests/services/test_gitea/undefined.json b/tests/test_rest_endpoints/services/test_gitea/undefined.json similarity index 100% rename from tests/services/test_gitea/undefined.json rename to tests/test_rest_endpoints/services/test_gitea/undefined.json diff --git a/tests/services/test_mailserver.py b/tests/test_rest_endpoints/services/test_mailserver.py similarity index 91% rename from tests/services/test_mailserver.py rename to tests/test_rest_endpoints/services/test_mailserver.py index a9e5f12..36cf615 100644 --- a/tests/services/test_mailserver.py +++ b/tests/test_rest_endpoints/services/test_mailserver.py @@ -25,7 +25,7 @@ class NoFileMock(ProcessMock): def mock_subproccess_popen(mocker): mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) mocker.patch( - "selfprivacy_api.resources.services.mailserver.get_domain", + "selfprivacy_api.rest.services.get_domain", autospec=True, return_value="example.com", ) @@ -37,7 +37,7 @@ def mock_subproccess_popen(mocker): def mock_no_file(mocker): mock = mocker.patch("subprocess.Popen", autospec=True, return_value=NoFileMock) mocker.patch( - 
"selfprivacy_api.resources.services.mailserver.get_domain", + "selfprivacy_api.rest.services.get_domain", autospec=True, return_value="example.com", ) @@ -67,7 +67,7 @@ def test_dkim_key(authorized_client, mock_subproccess_popen): """Test DKIM key""" response = authorized_client.get("/services/mailserver/dkim") assert response.status_code == 200 - assert base64.b64decode(response.data) == b"I am a DKIM key" + assert base64.b64decode(response.text) == b"I am a DKIM key" assert mock_subproccess_popen.call_args[0][0] == [ "cat", "/var/dkim/example.com.selector.txt", diff --git a/tests/services/test_nextcloud.py b/tests/test_rest_endpoints/services/test_nextcloud.py similarity index 100% rename from tests/services/test_nextcloud.py rename to tests/test_rest_endpoints/services/test_nextcloud.py diff --git a/tests/services/test_nextcloud/enable_undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json similarity index 100% rename from tests/services/test_nextcloud/enable_undefined.json rename to tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json diff --git a/tests/services/test_nextcloud/turned_off.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json similarity index 100% rename from tests/services/test_nextcloud/turned_off.json rename to tests/test_rest_endpoints/services/test_nextcloud/turned_off.json diff --git a/tests/services/test_nextcloud/turned_on.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json similarity index 100% rename from tests/services/test_nextcloud/turned_on.json rename to tests/test_rest_endpoints/services/test_nextcloud/turned_on.json diff --git a/tests/services/test_nextcloud/undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json similarity index 100% rename from tests/services/test_nextcloud/undefined.json rename to tests/test_rest_endpoints/services/test_nextcloud/undefined.json diff --git a/tests/services/test_ocserv.py b/tests/test_rest_endpoints/services/test_ocserv.py similarity index 100% rename from tests/services/test_ocserv.py rename to tests/test_rest_endpoints/services/test_ocserv.py diff --git a/tests/services/test_ocserv/enable_undefined.json b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json similarity index 100% rename from tests/services/test_ocserv/enable_undefined.json rename to tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json diff --git a/tests/services/test_ocserv/turned_off.json b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json similarity index 100% rename from tests/services/test_ocserv/turned_off.json rename to tests/test_rest_endpoints/services/test_ocserv/turned_off.json diff --git a/tests/services/test_ocserv/turned_on.json b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json similarity index 100% rename from tests/services/test_ocserv/turned_on.json rename to tests/test_rest_endpoints/services/test_ocserv/turned_on.json diff --git a/tests/services/test_ocserv/undefined.json b/tests/test_rest_endpoints/services/test_ocserv/undefined.json similarity index 100% rename from tests/services/test_ocserv/undefined.json rename to tests/test_rest_endpoints/services/test_ocserv/undefined.json diff --git a/tests/services/test_pleroma.py b/tests/test_rest_endpoints/services/test_pleroma.py similarity index 100% rename from tests/services/test_pleroma.py rename to tests/test_rest_endpoints/services/test_pleroma.py diff --git a/tests/services/test_pleroma/enable_undefined.json 
b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json similarity index 100% rename from tests/services/test_pleroma/enable_undefined.json rename to tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json diff --git a/tests/services/test_pleroma/turned_off.json b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json similarity index 100% rename from tests/services/test_pleroma/turned_off.json rename to tests/test_rest_endpoints/services/test_pleroma/turned_off.json diff --git a/tests/services/test_pleroma/turned_on.json b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json similarity index 100% rename from tests/services/test_pleroma/turned_on.json rename to tests/test_rest_endpoints/services/test_pleroma/turned_on.json diff --git a/tests/services/test_pleroma/undefined.json b/tests/test_rest_endpoints/services/test_pleroma/undefined.json similarity index 100% rename from tests/services/test_pleroma/undefined.json rename to tests/test_rest_endpoints/services/test_pleroma/undefined.json diff --git a/tests/services/test_restic.py b/tests/test_rest_endpoints/services/test_restic.py similarity index 93% rename from tests/services/test_restic.py rename to tests/test_rest_endpoints/services/test_restic.py index 913362f..9502be5 100644 --- a/tests/services/test_restic.py +++ b/tests/test_rest_endpoints/services/test_restic.py @@ -43,7 +43,7 @@ class ResticControllerMock: @pytest.fixture def mock_restic_controller(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerMock, ) @@ -60,7 +60,7 @@ class ResticControllerMockNoKey: @pytest.fixture def mock_restic_controller_no_key(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerMockNoKey, ) @@ -77,7 +77,7 @@ class ResticControllerNotInitialized: @pytest.fixture def mock_restic_controller_not_initialized(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerNotInitialized, ) @@ -94,7 +94,7 @@ class ResticControllerInitializing: @pytest.fixture def mock_restic_controller_initializing(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerInitializing, ) @@ -111,7 +111,7 @@ class ResticControllerBackingUp: @pytest.fixture def mock_restic_controller_backing_up(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerBackingUp, ) @@ -128,7 +128,7 @@ class ResticControllerError: @pytest.fixture def mock_restic_controller_error(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerError, ) @@ -145,7 +145,7 @@ class ResticControllerRestoring: @pytest.fixture def mock_restic_controller_restoring(mocker): mock = mocker.patch( - "selfprivacy_api.resources.services.restic.ResticController", + "selfprivacy_api.rest.services.ResticController", autospec=True, return_value=ResticControllerRestoring, ) @@ -154,9 
+154,7 @@ def mock_restic_controller_restoring(mocker): @pytest.fixture def mock_restic_tasks(mocker): - mock = mocker.patch( - "selfprivacy_api.resources.services.restic.restic_tasks", autospec=True - ) + mock = mocker.patch("selfprivacy_api.rest.services.restic_tasks", autospec=True) return mock @@ -197,7 +195,7 @@ def test_get_snapshots_unauthorized(client, mock_restic_controller, mock_restic_ def test_get_snapshots(authorized_client, mock_restic_controller, mock_restic_tasks): response = authorized_client.get("/services/restic/backup/list") assert response.status_code == 200 - assert response.get_json() == MOCKED_SNAPSHOTS + assert response.json() == MOCKED_SNAPSHOTS def test_create_backup_unauthorized(client, mock_restic_controller, mock_restic_tasks): @@ -247,7 +245,7 @@ def test_check_backup_status( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "INITIALIZED", "progress": 0, "error_message": None, @@ -259,7 +257,7 @@ def test_check_backup_status_no_key( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "NO_KEY", "progress": 0, "error_message": None, @@ -271,7 +269,7 @@ def test_check_backup_status_not_initialized( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "NOT_INITIALIZED", "progress": 0, "error_message": None, @@ -283,7 +281,7 @@ def test_check_backup_status_initializing( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "INITIALIZING", "progress": 0, "error_message": None, @@ -295,7 +293,7 @@ def test_check_backup_status_backing_up( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "BACKING_UP", "progress": 0.42, "error_message": None, @@ -307,7 +305,7 @@ def test_check_backup_status_error( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "ERROR", "progress": 0, "error_message": "Error message", @@ -319,7 +317,7 @@ def test_check_backup_status_restoring( ): response = authorized_client.get("/services/restic/backup/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "status": "RESTORING", "progress": 0, "error_message": None, @@ -346,7 +344,7 @@ def test_backup_restore_without_backup_id( authorized_client, mock_restic_controller, mock_restic_tasks ): response = authorized_client.put("/services/restic/backup/restore", json={}) - assert response.status_code == 400 + assert response.status_code == 422 assert mock_restic_tasks.restore_from_backup.call_count == 0 @@ -440,7 +438,7 @@ def test_set_backblaze_config_without_arguments( authorized_client, mock_restic_controller, mock_restic_tasks, some_settings ): response = authorized_client.put("/services/restic/backblaze/config") - assert response.status_code == 400 + assert response.status_code == 422 assert mock_restic_tasks.update_keys_from_userdata.call_count == 0 @@ -451,7 +449,7 @@ def 
test_set_backblaze_config_without_all_values( "/services/restic/backblaze/config", json={"accountId": "123", "applicationKey": "456"}, ) - assert response.status_code == 400 + assert response.status_code == 422 assert mock_restic_tasks.update_keys_from_userdata.call_count == 0 diff --git a/tests/services/test_restic/no_values.json b/tests/test_rest_endpoints/services/test_restic/no_values.json similarity index 100% rename from tests/services/test_restic/no_values.json rename to tests/test_rest_endpoints/services/test_restic/no_values.json diff --git a/tests/services/test_restic/some_values.json b/tests/test_rest_endpoints/services/test_restic/some_values.json similarity index 100% rename from tests/services/test_restic/some_values.json rename to tests/test_rest_endpoints/services/test_restic/some_values.json diff --git a/tests/services/test_restic/undefined.json b/tests/test_rest_endpoints/services/test_restic/undefined.json similarity index 100% rename from tests/services/test_restic/undefined.json rename to tests/test_rest_endpoints/services/test_restic/undefined.json diff --git a/tests/services/test_services.py b/tests/test_rest_endpoints/services/test_services.py similarity index 70% rename from tests/services/test_services.py rename to tests/test_rest_endpoints/services/test_services.py index aed48fb..1108e8c 100644 --- a/tests/services/test_services.py +++ b/tests/test_rest_endpoints/services/test_services.py @@ -9,76 +9,81 @@ def read_json(file_path): def call_args_asserts(mocked_object): - assert mocked_object.call_count == 8 + assert mocked_object.call_count == 7 assert mocked_object.call_args_list[0][0][0] == [ "systemctl", - "status", + "show", "dovecot2.service", ] assert mocked_object.call_args_list[1][0][0] == [ "systemctl", - "status", + "show", "postfix.service", ] assert mocked_object.call_args_list[2][0][0] == [ "systemctl", - "status", - "nginx.service", + "show", + "vaultwarden.service", ] assert mocked_object.call_args_list[3][0][0] == [ "systemctl", - "status", - "vaultwarden.service", + "show", + "gitea.service", ] assert mocked_object.call_args_list[4][0][0] == [ "systemctl", - "status", - "gitea.service", + "show", + "phpfpm-nextcloud.service", ] assert mocked_object.call_args_list[5][0][0] == [ "systemctl", - "status", - "phpfpm-nextcloud.service", + "show", + "ocserv.service", ] assert mocked_object.call_args_list[6][0][0] == [ "systemctl", - "status", - "ocserv.service", - ] - assert mocked_object.call_args_list[7][0][0] == [ - "systemctl", - "status", + "show", "pleroma.service", ] -class ProcessMock: - """Mock subprocess.Popen""" +SUCCESSFUL_STATUS = b""" +Type=oneshot +ExitType=main +Restart=no +NotifyAccess=none +RestartUSec=100ms +LoadState=loaded +ActiveState=active +FreezerState=running +SubState=exited +""" - def __init__(self, args, **kwargs): - self.args = args - self.kwargs = kwargs - - def communicate(): - return (b"", None) - - returncode = 0 - - -class BrokenServiceMock(ProcessMock): - returncode = 3 +FAILED_STATUS = b""" +Type=oneshot +ExitType=main +Restart=no +NotifyAccess=none +RestartUSec=100ms +LoadState=loaded +ActiveState=failed +FreezerState=running +SubState=exited +""" @pytest.fixture def mock_subproccess_popen(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=SUCCESSFUL_STATUS + ) return mock @pytest.fixture def mock_broken_service(mocker): mock = mocker.patch( - "subprocess.Popen", autospec=True, 
return_value=BrokenServiceMock + "subprocess.check_output", autospec=True, return_value=FAILED_STATUS ) return mock @@ -104,7 +109,7 @@ def test_illegal_methods(authorized_client, mock_subproccess_popen): def test_dkim_key(authorized_client, mock_subproccess_popen): response = authorized_client.get("/services/status") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "imap": 0, "smtp": 0, "http": 0, @@ -120,14 +125,14 @@ def test_dkim_key(authorized_client, mock_subproccess_popen): def test_no_dkim_key(authorized_client, mock_broken_service): response = authorized_client.get("/services/status") assert response.status_code == 200 - assert response.get_json() == { - "imap": 3, - "smtp": 3, - "http": 3, - "bitwarden": 3, - "gitea": 3, - "nextcloud": 3, - "ocserv": 3, - "pleroma": 3, + assert response.json() == { + "imap": 1, + "smtp": 1, + "http": 0, + "bitwarden": 1, + "gitea": 1, + "nextcloud": 1, + "ocserv": 1, + "pleroma": 1, } call_args_asserts(mock_broken_service) diff --git a/tests/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py similarity index 91% rename from tests/services/test_ssh.py rename to tests/test_rest_endpoints/services/test_ssh.py index 5975811..a17bdab 100644 --- a/tests/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,14 +95,18 @@ def some_users(mocker, datadir): ## TEST 401 ###################################################### -@pytest.mark.parametrize( - "endpoint", ["ssh", "ssh/enable", "ssh/key/send", "ssh/keys/user"] -) +@pytest.mark.parametrize("endpoint", ["ssh/enable", "ssh/keys/user"]) def test_unauthorized(client, ssh_off, endpoint): response = client.post(f"/services/{endpoint}") assert response.status_code == 401 +@pytest.mark.parametrize("endpoint", ["ssh", "ssh/key/send"]) +def test_unauthorized_put(client, ssh_off, endpoint): + response = client.put(f"/services/{endpoint}") + assert response.status_code == 401 + + ## TEST ENABLE ###################################################### @@ -133,31 +137,31 @@ def test_legacy_enable_when_enabled(authorized_client, ssh_on): def test_get_current_settings_ssh_off(authorized_client, ssh_off): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": False, "passwordAuthentication": True} + assert response.json() == {"enable": False, "passwordAuthentication": True} def test_get_current_settings_ssh_on(authorized_client, ssh_on): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": True, "passwordAuthentication": True} + assert response.json() == {"enable": True, "passwordAuthentication": True} def test_get_current_settings_all_off(authorized_client, all_off): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": False, "passwordAuthentication": False} + assert response.json() == {"enable": False, "passwordAuthentication": False} def test_get_current_settings_undefined(authorized_client, undefined_settings): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert response.json == {"enable": True, "passwordAuthentication": True} + assert response.json() == {"enable": True, "passwordAuthentication": True} def test_get_current_settings_mostly_undefined(authorized_client, undefined_values): response = authorized_client.get("/services/ssh") assert response.status_code == 200 - assert 
response.json == {"enable": True, "passwordAuthentication": True} + assert response.json() == {"enable": True, "passwordAuthentication": True} ## PUT ON /ssh ###################################################### @@ -275,29 +279,22 @@ def test_add_invalid_root_key(authorized_client, ssh_on): ## /ssh/keys/{user} ###################################################### -def test_add_root_key_via_wrong_endpoint(authorized_client, ssh_on): - response = authorized_client.post( - "/services/ssh/keys/root", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 400 - - def test_get_root_key(authorized_client, root_and_admin_have_keys): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 - assert response.json == ["ssh-ed25519 KEY test@pc"] + assert response.json() == ["ssh-ed25519 KEY test@pc"] def test_get_root_key_when_none(authorized_client, ssh_on): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_get_root_key_on_undefined(authorized_client, undefined_settings): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_delete_root_key(authorized_client, root_and_admin_have_keys): @@ -310,6 +307,10 @@ def test_delete_root_key(authorized_client, root_and_admin_have_keys): not in read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[ "ssh" ] + or read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][ + "rootKeys" + ] + == [] ) @@ -330,19 +331,19 @@ def test_delete_root_key_on_undefined(authorized_client, undefined_settings): "/services/ssh/keys/root", json={"public_key": "ssh-ed25519 KEY test@pc"} ) assert response.status_code == 404 - assert read_json(undefined_settings / "undefined.json")["ssh"]["rootKeys"] == [] + assert "ssh" not in read_json(undefined_settings / "undefined.json") def test_get_admin_key(authorized_client, root_and_admin_have_keys): response = authorized_client.get("/services/ssh/keys/tester") assert response.status_code == 200 - assert response.json == ["ssh-rsa KEY test@pc"] + assert response.json() == ["ssh-rsa KEY test@pc"] def test_get_admin_key_when_none(authorized_client, ssh_on): response = authorized_client.get("/services/ssh/keys/tester") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_delete_admin_key(authorized_client, root_and_admin_have_keys): @@ -371,7 +372,7 @@ def test_delete_admin_key_on_undefined(authorized_client, undefined_settings): "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} ) assert response.status_code == 404 - assert read_json(undefined_settings / "undefined.json")["sshKeys"] == [] + assert "sshKeys" not in read_json(undefined_settings / "undefined.json") def test_add_admin_key(authorized_client, ssh_on): @@ -418,9 +419,9 @@ def test_get_user_key(authorized_client, some_users, user): response = authorized_client.get(f"/services/ssh/keys/user{user}") assert response.status_code == 200 if user == 1: - assert response.json == ["ssh-rsa KEY user1@pc"] + assert response.json() == ["ssh-rsa KEY user1@pc"] else: - assert response.json == [] + assert response.json() == [] def test_get_keys_of_nonexistent_user(authorized_client, some_users): @@ -483,7 +484,13 @@ def test_delete_nonexistent_user_key(authorized_client, some_users, user): 
f"/services/ssh/keys/user{user}", json={"public_key": "ssh-rsa KEY user1@pc"} ) assert response.status_code == 404 - assert read_json(some_users / "some_users.json")["users"][user - 1]["sshKeys"] == [] + if user == 2: + assert ( + read_json(some_users / "some_users.json")["users"][user - 1]["sshKeys"] + == [] + ) + if user == 3: + "sshKeys" not in read_json(some_users / "some_users.json")["users"][user - 1] def test_add_keys_of_nonexistent_user(authorized_client, some_users): diff --git a/tests/services/test_ssh/all_off.json b/tests/test_rest_endpoints/services/test_ssh/all_off.json similarity index 100% rename from tests/services/test_ssh/all_off.json rename to tests/test_rest_endpoints/services/test_ssh/all_off.json diff --git a/tests/services/test_ssh/root_and_admin_have_keys.json b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json similarity index 100% rename from tests/services/test_ssh/root_and_admin_have_keys.json rename to tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json diff --git a/tests/test_rest_endpoints/services/test_ssh/some_users.json b/tests/test_rest_endpoints/services/test_ssh/some_users.json new file mode 100644 index 0000000..569253a --- /dev/null +++ b/tests/test_rest_endpoints/services/test_ssh/some_users.json @@ -0,0 +1,71 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": false + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": [ + "ssh-rsa KEY user1@pc" + ] + }, + { + "username": "user2", + "hashedPassword": "HASHED_PASSWORD_2", + "sshKeys": [ + ] + }, + { + "username": "user3", + "hashedPassword": "HASHED_PASSWORD_3" + } + ] +} \ No newline at end of file diff --git a/tests/services/test_ssh/turned_off.json b/tests/test_rest_endpoints/services/test_ssh/turned_off.json similarity index 100% rename from tests/services/test_ssh/turned_off.json rename to tests/test_rest_endpoints/services/test_ssh/turned_off.json diff --git a/tests/services/test_ssh/turned_on.json b/tests/test_rest_endpoints/services/test_ssh/turned_on.json similarity index 100% rename from tests/services/test_ssh/turned_on.json rename to tests/test_rest_endpoints/services/test_ssh/turned_on.json diff --git a/tests/services/test_ssh/undefined.json b/tests/test_rest_endpoints/services/test_ssh/undefined.json similarity index 100% rename from tests/services/test_ssh/undefined.json rename to tests/test_rest_endpoints/services/test_ssh/undefined.json diff --git a/tests/services/test_ssh/undefined_values.json b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json similarity index 100% rename from tests/services/test_ssh/undefined_values.json rename to 
tests/test_rest_endpoints/services/test_ssh/undefined_values.json diff --git a/tests/test_auth.py b/tests/test_rest_endpoints/test_auth.py similarity index 83% rename from tests/test_auth.py rename to tests/test_rest_endpoints/test_auth.py index 819a385..1083be5 100644 --- a/tests/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -1,11 +1,12 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument +# pylint: disable=missing-function-docstring import datetime -import json -import re import pytest from mnemonic import Mnemonic +from tests.common import read_json, write_json + TOKENS_FILE_CONTETS = { "tokens": [ @@ -22,25 +23,22 @@ TOKENS_FILE_CONTETS = { ] } - -def read_json(file_path): - with open(file_path, "r", encoding="utf-8") as file: - return json.load(file) - - -def write_json(file_path, data): - with open(file_path, "w", encoding="utf-8") as file: - json.dump(data, file, indent=4) +DATE_FORMATS = [ + "%Y-%m-%dT%H:%M:%S.%fZ", + "%Y-%m-%dT%H:%M:%S.%f", + "%Y-%m-%d %H:%M:%S.%fZ", + "%Y-%m-%d %H:%M:%S.%f", +] def test_get_tokens_info(authorized_client, tokens_file): response = authorized_client.get("/auth/tokens") assert response.status_code == 200 - assert response.json == [ - {"name": "test_token", "date": "2022-01-14 08:31:10.789314", "is_caller": True}, + assert response.json() == [ + {"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True}, { "name": "test_token2", - "date": "2022-01-14 08:31:10.789314", + "date": "2022-01-14T08:31:10.789314", "is_caller": False, }, ] @@ -98,7 +96,7 @@ def test_refresh_token_unauthorized(client, tokens_file): def test_refresh_token(authorized_client, tokens_file): response = authorized_client.post("/auth/tokens") assert response.status_code == 200 - new_token = response.json["token"] + new_token = response.json()["token"] assert read_json(tokens_file)["tokens"][0]["token"] == new_token @@ -106,7 +104,7 @@ def test_refresh_token(authorized_client, tokens_file): def test_get_new_device_auth_token_unauthorized(client, tokens_file): - response = client.get("/auth/new_device") + response = client.post("/auth/new_device") assert response.status_code == 401 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -114,19 +112,19 @@ def test_get_new_device_auth_token_unauthorized(client, tokens_file): def test_get_new_device_auth_token(authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token def test_get_and_delete_new_device_token(authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token response = authorized_client.delete( - "/auth/new_device", json={"token": response.json["token"]} + "/auth/new_device", json={"token": response.json()["token"]} ) assert response.status_code == 200 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -141,15 +139,15 @@ def test_delete_token_unauthenticated(client, 
tokens_file): def test_get_and_authorize_new_device(client, authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json["token"] + assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" @@ -165,19 +163,19 @@ def test_authorize_new_device_with_invalid_token(client, tokens_file): def test_get_and_authorize_used_token(client, authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json["token"] + assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 404 @@ -187,8 +185,8 @@ def test_get_and_authorize_token_after_12_minutes( ): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 - assert "token" in response.json - token = Mnemonic(language="english").to_entropy(response.json["token"]).hex() + assert "token" in response.json() + token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() assert read_json(tokens_file)["new_device"]["token"] == token file_data = read_json(tokens_file) @@ -199,7 +197,7 @@ def test_get_and_authorize_token_after_12_minutes( response = client.post( "/auth/new_device/authorize", - json={"token": response.json["token"], "device": "new_device"}, + json={"token": response.json()["token"], "device": "new_device"}, ) assert response.status_code == 404 @@ -209,7 +207,7 @@ def test_authorize_without_token(client, tokens_file): "/auth/new_device/authorize", json={"device": "new_device"}, ) - assert response.status_code == 400 + assert response.status_code == 422 assert read_json(tokens_file) == TOKENS_FILE_CONTETS @@ -245,7 +243,7 @@ def test_get_recovery_token_status_unauthorized(client, tokens_file): def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": False, 
"valid": False, "date": None, @@ -259,8 +257,8 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): # Generate token without expiration and uses_left response = authorized_client.post("/auth/recovery_token") assert response.status_code == 200 - assert "token" in response.json - mnemonic_token = response.json["token"] + assert "token" in response.json() + mnemonic_token = response.json()["token"] token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert read_json(tokens_file)["recovery_token"]["token"] == token @@ -268,15 +266,15 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): assert time_generated is not None # Assert that the token was generated near the current time assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) < datetime.datetime.now() ) # Try to get token status - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -290,7 +288,7 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): json={"token": mnemonic_token, "device": "recovery_device"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][2]["token"] == new_token assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" @@ -300,47 +298,49 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): json={"token": mnemonic_token, "device": "recovery_device2"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][3]["token"] == new_token assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" +@pytest.mark.parametrize("timeformat", DATE_FORMATS) def test_generate_recovery_token_with_expiration_date( - authorized_client, client, tokens_file + authorized_client, client, tokens_file, timeformat ): # Generate token with expiration date # Generate expiration date in the future - # Expiration date format is YYYY-MM-DDTHH:MM:SS.SSSZ expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5) - expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + expiration_date_str = expiration_date.strftime(timeformat) response = authorized_client.post( "/auth/recovery_token", json={"expiration": expiration_date_str}, ) assert response.status_code == 200 - assert "token" in response.json - mnemonic_token = response.json["token"] + assert "token" in response.json() + mnemonic_token = response.json()["token"] token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert read_json(tokens_file)["recovery_token"]["token"] == token - assert read_json(tokens_file)["recovery_token"]["expiration"] == expiration_date_str + assert datetime.datetime.strptime( + read_json(tokens_file)["recovery_token"]["expiration"], "%Y-%m-%dT%H:%M:%S.%f" + ) == datetime.datetime.strptime(expiration_date_str, timeformat) time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None # Assert that the token was generated near the current time assert ( - 
datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) < datetime.datetime.now() ) # Try to get token status - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, - "expiration": expiration_date_str, + "expiration": expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f"), "uses_left": None, } @@ -350,7 +350,7 @@ def test_generate_recovery_token_with_expiration_date( json={"token": mnemonic_token, "device": "recovery_device"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][2]["token"] == new_token assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" @@ -360,14 +360,14 @@ def test_generate_recovery_token_with_expiration_date( json={"token": mnemonic_token, "device": "recovery_device2"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][3]["token"] == new_token assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" # Try to use token after expiration date new_data = read_json(tokens_file) new_data["recovery_token"]["expiration"] = datetime.datetime.now().strftime( - "%Y-%m-%dT%H:%M:%S.%fZ" + "%Y-%m-%dT%H:%M:%S.%f" ) write_json(tokens_file, new_data) recovery_response = client.post( @@ -379,9 +379,9 @@ def test_generate_recovery_token_with_expiration_date( assert read_json(tokens_file)["tokens"] == new_data["tokens"] # Get the status of the token - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": False, "date": time_generated, @@ -390,12 +390,13 @@ def test_generate_recovery_token_with_expiration_date( } +@pytest.mark.parametrize("timeformat", DATE_FORMATS) def test_generate_recovery_token_with_expiration_in_the_past( - authorized_client, client, tokens_file + authorized_client, tokens_file, timeformat ): # Server must return 400 if expiration date is in the past - expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5) - expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + expiration_date = datetime.datetime.utcnow() - datetime.timedelta(minutes=5) + expiration_date_str = expiration_date.strftime(timeformat) response = authorized_client.post( "/auth/recovery_token", json={"expiration": expiration_date_str}, @@ -405,7 +406,7 @@ def test_generate_recovery_token_with_expiration_in_the_past( def test_generate_recovery_token_with_invalid_time_format( - authorized_client, client, tokens_file + authorized_client, tokens_file ): # Server must return 400 if expiration date is in the past expiration_date = "invalid_time_format" @@ -413,7 +414,7 @@ def test_generate_recovery_token_with_invalid_time_format( "/auth/recovery_token", json={"expiration": expiration_date}, ) - assert response.status_code == 400 + assert response.status_code == 422 assert "recovery_token" not in read_json(tokens_file) @@ -426,8 +427,8 @@ def test_generate_recovery_token_with_limited_uses( json={"uses": 2}, ) assert 
response.status_code == 200 - assert "token" in response.json - mnemonic_token = response.json["token"] + assert "token" in response.json() + mnemonic_token = response.json()["token"] token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() assert read_json(tokens_file)["recovery_token"]["token"] == token assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 @@ -436,15 +437,15 @@ def test_generate_recovery_token_with_limited_uses( time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%fZ") + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) < datetime.datetime.now() ) # Try to get token status - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -458,16 +459,16 @@ def test_generate_recovery_token_with_limited_uses( json={"token": mnemonic_token, "device": "recovery_device"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][2]["token"] == new_token assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" assert read_json(tokens_file)["recovery_token"]["uses_left"] == 1 # Get the status of the token - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": True, "date": time_generated, @@ -481,14 +482,14 @@ def test_generate_recovery_token_with_limited_uses( json={"token": mnemonic_token, "device": "recovery_device2"}, ) assert recovery_response.status_code == 200 - new_token = recovery_response.json["token"] + new_token = recovery_response.json()["token"] assert read_json(tokens_file)["tokens"][3]["token"] == new_token assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" # Get the status of the token - response = client.get("/auth/recovery_token") + response = authorized_client.get("/auth/recovery_token") assert response.status_code == 200 - assert response.json == { + assert response.json() == { "exists": True, "valid": False, "date": time_generated, diff --git a/tests/test_system.py b/tests/test_rest_endpoints/test_system.py similarity index 97% rename from tests/test_system.py rename to tests/test_rest_endpoints/test_system.py index 9ed2617..90c1499 100644 --- a/tests/test_system.py +++ b/tests/test_rest_endpoints/test_system.py @@ -67,6 +67,8 @@ class ProcessMock: class BrokenServiceMock(ProcessMock): + """Mock subprocess.Popen""" + def communicate(): return (b"Testing error", None) @@ -121,13 +123,13 @@ def test_get_timezone_unauthorized(client, turned_on): def test_get_timezone(authorized_client, turned_on): response = authorized_client.get("/system/configuration/timezone") assert response.status_code == 200 - assert response.get_json() == "Europe/Moscow" + assert response.json() == "Europe/Moscow" def test_get_timezone_on_undefined(authorized_client, undefined_config): response = authorized_client.get("/system/configuration/timezone") assert response.status_code == 200 - assert response.get_json() == "Europe/Uzhgorod" + assert response.json() == "Europe/Uzhgorod" def 
test_put_timezone_unauthorized(client, turned_on): @@ -157,7 +159,7 @@ def test_put_timezone_on_undefined(authorized_client, undefined_config): def test_put_timezone_without_timezone(authorized_client, turned_on): response = authorized_client.put("/system/configuration/timezone", json={}) - assert response.status_code == 400 + assert response.status_code == 422 assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" @@ -180,7 +182,7 @@ def test_get_auto_upgrade_unauthorized(client, turned_on): def test_get_auto_upgrade(authorized_client, turned_on): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": True, "allowReboot": True, } @@ -189,7 +191,7 @@ def test_get_auto_upgrade(authorized_client, turned_on): def test_get_auto_upgrade_on_undefined(authorized_client, undefined_config): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": True, "allowReboot": False, } @@ -198,7 +200,7 @@ def test_get_auto_upgrade_on_undefined(authorized_client, undefined_config): def test_get_auto_upgrade_without_values(authorized_client, no_values): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": True, "allowReboot": False, } @@ -207,7 +209,7 @@ def test_get_auto_upgrade_without_values(authorized_client, no_values): def test_get_auto_upgrade_turned_off(authorized_client, turned_off): response = authorized_client.get("/system/configuration/autoUpgrade") assert response.status_code == 200 - assert response.get_json() == { + assert response.json() == { "enable": False, "allowReboot": False, } @@ -355,7 +357,7 @@ def test_get_system_version_unauthorized(client, mock_subprocess_check_output): def test_get_system_version(authorized_client, mock_subprocess_check_output): response = authorized_client.get("/system/version") assert response.status_code == 200 - assert response.get_json() == {"system_version": "Testing Linux"} + assert response.json() == {"system_version": "Testing Linux"} assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"] @@ -382,7 +384,7 @@ def test_get_python_version_unauthorized(client, mock_subprocess_check_output): def test_get_python_version(authorized_client, mock_subprocess_check_output): response = authorized_client.get("/system/pythonVersion") assert response.status_code == 200 - assert response.get_json() == "Testing Linux" + assert response.json() == "Testing Linux" assert mock_subprocess_check_output.call_count == 1 assert mock_subprocess_check_output.call_args[0][0] == ["python", "-V"] diff --git a/tests/test_rest_endpoints/test_system/domain b/tests/test_rest_endpoints/test_system/domain new file mode 100644 index 0000000..3679d0d --- /dev/null +++ b/tests/test_rest_endpoints/test_system/domain @@ -0,0 +1 @@ +test-domain.tld \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/no_values.json b/tests/test_rest_endpoints/test_system/no_values.json new file mode 100644 index 0000000..59e5e71 --- /dev/null +++ b/tests/test_rest_endpoints/test_system/no_values.json @@ -0,0 +1,50 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": 
"TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/turned_off.json b/tests/test_rest_endpoints/test_system/turned_off.json new file mode 100644 index 0000000..f451683 --- /dev/null +++ b/tests/test_rest_endpoints/test_system/turned_off.json @@ -0,0 +1,52 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": false, + "allowReboot": false + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} \ No newline at end of file diff --git a/tests/test_system/turned_on.json b/tests/test_rest_endpoints/test_system/turned_on.json similarity index 100% rename from tests/test_system/turned_on.json rename to tests/test_rest_endpoints/test_system/turned_on.json diff --git a/tests/test_rest_endpoints/test_system/undefined.json b/tests/test_rest_endpoints/test_system/undefined.json new file mode 100644 index 0000000..b67b296 --- /dev/null +++ b/tests/test_rest_endpoints/test_system/undefined.json @@ -0,0 +1,47 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": true + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} \ No newline at end of file diff --git a/tests/test_users.py b/tests/test_rest_endpoints/test_users.py similarity index 93% rename from tests/test_users.py rename to tests/test_rest_endpoints/test_users.py index 9374ef2..ebb3eff 100644 --- a/tests/test_users.py +++ b/tests/test_rest_endpoints/test_users.py @@ -121,31 
+121,31 @@ def test_get_users_unauthorized(client, some_users, mock_subprocess_popen): def test_get_some_users(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.get("/users") assert response.status_code == 200 - assert response.json == ["user1", "user2", "user3"] + assert response.json() == ["user1", "user2", "user3"] def test_get_one_user(authorized_client, one_user, mock_subprocess_popen): response = authorized_client.get("/users") assert response.status_code == 200 - assert response.json == ["user1"] + assert response.json() == ["user1"] def test_get_one_user_with_main(authorized_client, one_user, mock_subprocess_popen): response = authorized_client.get("/users?withMainUser=true") assert response.status_code == 200 - assert response.json == ["tester", "user1"] + assert sorted(response.json()) == sorted(["tester", "user1"]) def test_get_no_users(authorized_client, no_users, mock_subprocess_popen): response = authorized_client.get("/users") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_get_no_users_with_main(authorized_client, no_users, mock_subprocess_popen): response = authorized_client.get("/users?withMainUser=true") assert response.status_code == 200 - assert response.json == ["tester"] + assert response.json() == ["tester"] def test_get_undefined_users( @@ -153,7 +153,7 @@ def test_get_undefined_users( ): response = authorized_client.get("/users") assert response.status_code == 200 - assert response.json == [] + assert response.json() == [] def test_post_users_unauthorized(client, some_users, mock_subprocess_popen): @@ -174,6 +174,7 @@ def test_post_one_user(authorized_client, one_user, mock_subprocess_popen): }, { "username": "user4", + "sshKeys": [], "hashedPassword": "NEW_HASHED", }, ] @@ -181,19 +182,19 @@ def test_post_without_username(authorized_client, one_user, mock_subprocess_popen): response = authorized_client.post("/users", json={"password": "password"}) - assert response.status_code == 400 + assert response.status_code == 422 def test_post_without_password(authorized_client, one_user, mock_subprocess_popen): response = authorized_client.post("/users", json={"username": "user4"}) - assert response.status_code == 400 + assert response.status_code == 422 def test_post_without_username_and_password( authorized_client, one_user, mock_subprocess_popen ): response = authorized_client.post("/users", json={}) - assert response.status_code == 400 + assert response.status_code == 422 @pytest.mark.parametrize("username", invalid_usernames) @@ -226,7 +227,7 @@ def test_post_user_to_undefined_users( ) assert response.status_code == 201 assert read_json(undefined_settings / "undefined.json")["users"] == [ - {"username": "user4", "hashedPassword": "NEW_HASHED"} + {"username": "user4", "sshKeys": [], "hashedPassword": "NEW_HASHED"} ] @@ -279,11 +280,6 @@ def test_delete_main_user(authorized_client, some_users, mock_subprocess_popen): assert response.status_code == 400 -def test_delete_without_argument(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.delete("/users/") - assert response.status_code == 404 - - def test_delete_just_delete(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.delete("/users") assert response.status_code == 405 diff --git a/tests/test_rest_endpoints/test_users/no_users.json b/tests/test_rest_endpoints/test_users/no_users.json new
file mode 100644 index 0000000..e5efe86 --- /dev/null +++ b/tests/test_rest_endpoints/test_users/no_users.json @@ -0,0 +1,54 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": false + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [ + ] +} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/one_user.json b/tests/test_rest_endpoints/test_users/one_user.json new file mode 100644 index 0000000..5df2108 --- /dev/null +++ b/tests/test_rest_endpoints/test_users/one_user.json @@ -0,0 +1,61 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": false + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": [ + "ssh-rsa KEY user1@pc" + ] + } + ] +} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/some_users.json b/tests/test_rest_endpoints/test_users/some_users.json new file mode 100644 index 0000000..569253a --- /dev/null +++ b/tests/test_rest_endpoints/test_users/some_users.json @@ -0,0 +1,71 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": false + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [ + { + "username": 
"user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": [ + "ssh-rsa KEY user1@pc" + ] + }, + { + "username": "user2", + "hashedPassword": "HASHED_PASSWORD_2", + "sshKeys": [ + ] + }, + { + "username": "user3", + "hashedPassword": "HASHED_PASSWORD_3" + } + ] +} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/undefined.json b/tests/test_rest_endpoints/test_users/undefined.json new file mode 100644 index 0000000..7b2cf8b --- /dev/null +++ b/tests/test_rest_endpoints/test_users/undefined.json @@ -0,0 +1,52 @@ +{ + "backblaze": { + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + }, + "api": { + "token": "TEST_TOKEN", + "enableSwagger": false + }, + "bitwarden": { + "enable": false + }, + "cloudflare": { + "apiKey": "TOKEN" + }, + "databasePassword": "PASSWORD", + "domain": "test.tld", + "hashedMasterPassword": "HASHED_PASSWORD", + "hostname": "test-instance", + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "resticPassword": "PASS", + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "username": "tester", + "gitea": { + "enable": false + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "timezone": "Europe/Moscow", + "sshKeys": [ + "ssh-rsa KEY test@pc" + ] +} \ No newline at end of file