forked from SelfPrivacy/selfprivacy-rest-api
Compare commits
210 Commits
Author | SHA1 | Date |
---|---|---|
Houkime | d9b5710007 | |
Houkime | 4c61c874c4 | |
Houkime | 628d7ed4a8 | |
Houkime | c3d797b0ab | |
Houkime | 72681c5330 | |
Houkime | 496f0bc452 | |
Houkime | a8ac9e7c64 | |
Houkime | c624529670 | |
Houkime | dc13235332 | |
Houkime | eba1f19a55 | |
Houkime | cf87b3590b | |
Houkime | c8ccd415fe | |
Houkime | 59f5ec2e2e | |
Houkime | 5280aca544 | |
Houkime | 8e1e917bff | |
Houkime | 6b6cc33b8e | |
Houkime | 0160f4aac0 | |
Houkime | 5382558655 | |
Houkime | b6dff04f42 | |
Houkime | f742c256d7 | |
Houkime | 88508985c9 | |
Houkime | 8d15955124 | |
Houkime | 3e641b1107 | |
Houkime | e537dd5f45 | |
Houkime | a0bd3eb0ff | |
Houkime | c7222fd8b9 | |
Houkime | 93b02752df | |
Houkime | ff6128b96e | |
Houkime | e9934a2567 | |
Houkime | 74c36afc2d | |
Houkime | 37fd3574a1 | |
Houkime | e7f0d76001 | |
Houkime | e5083c308a | |
Houkime | c917c411fd | |
Houkime | 4582bfe6c4 | |
Houkime | c597850fa5 | |
Houkime | 70c82cf142 | |
Houkime | a8d91a7a63 | |
Inex Code | 33b53cb320 | |
Inex Code | d84b4e9ad6 | |
Houkime | 710ad7221b | |
Houkime | 91bbc3520b | |
Houkime | 7a27af9173 | |
Houkime | d3c337853e | |
Houkime | 2f0a7c35f3 | |
Houkime | cd1afb8464 | |
Houkime | a252c11e7d | |
Houkime | 406f5ee921 | |
Houkime | fc24290f1c | |
Houkime | b254e40961 | |
Houkime | 38f9eb825a | |
Houkime | 2a577aedb7 | |
Houkime | b526e0b6ad | |
Houkime | d55232735d | |
Houkime | 5bf7e81351 | |
Houkime | 6dd724b682 | |
Houkime | 8b19c9c013 | |
Houkime | 51cc35708e | |
Houkime | 243f9be225 | |
Houkime | cb46c5e43b | |
Houkime | 7b5fb4b2df | |
Houkime | 5812b57ced | |
Houkime | c5dc09d5dd | |
Houkime | cee0419a52 | |
Houkime | 40d331d01f | |
Houkime | 1768fe278a | |
Houkime | d930426f25 | |
Inex Code | e7a49e170d | |
Inex Code | b5ee542693 | |
Inex Code | 2db40650ad | |
def | 2fc635da71 | |
def | bcfb8e62e9 | |
Inex Code | 24353ca56a | |
Inex Code | 45c6133881 | |
Inex Code | cb403a94bd | |
Inex Code | 7b526b7bd9 | |
Inex Code | 34b1a47d04 | |
def | 5c30f80479 | |
Inex Code | 999dd95cab | |
Houkime | 67872d7c55 | |
Houkime | 3ecfb2eacb | |
Houkime | 450ff41ebd | |
Houkime | 8235c3595c | |
Houkime | 7d9bccf4ec | |
Houkime | 87ea88c50a | |
Houkime | 25326b75ca | |
Houkime | b3d6251d11 | |
Houkime | b11e5a5f77 | |
Houkime | 69577c2854 | |
Houkime | 7cf295450b | |
Houkime | f33d5155b0 | |
Houkime | 5d4ed73435 | |
Houkime | c037a12f4d | |
Houkime | 29723b9f3a | |
Houkime | cb1906144c | |
Houkime | 3f6aa9bd06 | |
Houkime | 5dedbda41f | |
Houkime | e817de6228 | |
Houkime | f928ca160a | |
Houkime | a2ac47b0f5 | |
houkime | 66480c9904 | |
Houkime | 5a1f64b1e7 | |
Houkime | 22a309466e | |
Houkime | 2a239e35ad | |
Houkime | 20410ec790 | |
Houkime | 3021584adc | |
Houkime | 16f71b0b09 | |
Houkime | 39277419ac | |
Houkime | d3bf867bb5 | |
Houkime | 009a89fa02 | |
Houkime | a97705ef25 | |
Houkime | ab70687c61 | |
Houkime | 5a25e2a270 | |
Houkime | 0ae7c43ebf | |
Houkime | 6f6a9f5ef0 | |
Houkime | fda5d315a9 | |
Houkime | 13e84e2697 | |
Houkime | eba1d01b3d | |
Houkime | 8dfb3eb936 | |
Houkime | 4579fec569 | |
Houkime | 257096084f | |
Houkime | bf6c230ae0 | |
Houkime | 95e200bfc5 | |
Houkime | 9ffd67fa19 | |
Houkime | b98ccb88d1 | |
Houkime | 3cb7f29593 | |
Houkime | e504585437 | |
Houkime | 647e02f25b | |
Houkime | ba6a5261fa | |
Houkime | d8e3cd67e0 | |
Houkime | 256c16fa9f | |
Houkime | f2fa47466b | |
Houkime | ca822cdf6f | |
Houkime | 2797c6f88f | |
Houkime | 4498003aca | |
Houkime | 772c0dfc64 | |
Houkime | 671203e990 | |
Houkime | 9a49067e53 | |
Houkime | 682cd4ae87 | |
Houkime | 572ec75c39 | |
Houkime | 27a7c24bc3 | |
Houkime | 4e60d1d37a | |
Houkime | ff264ec808 | |
Houkime | b856a2aad3 | |
Houkime | 0d748d7ab1 | |
Houkime | c12dca9d9b | |
Houkime | 4492bbe995 | |
Houkime | 84bfa333fa | |
Houkime | be13d6163e | |
Houkime | ce411e9291 | |
Houkime | cf7b7eb8a7 | |
Houkime | 3feebd5290 | |
Houkime | 73584872f0 | |
Houkime | dc778b545e | |
Houkime | f96d8b7d7c | |
Houkime | dd525fe723 | |
Houkime | b9c570720b | |
Houkime | 732e72d414 | |
Houkime | 6f400911fc | |
Houkime | c86eb8b786 | |
Houkime | fa54220327 | |
Houkime | b43c4014e2 | |
Houkime | db55685488 | |
Houkime | 3921d9fe4c | |
Houkime | 2e2d344f43 | |
Houkime | 55ad2484b8 | |
Houkime | 8a05a55b80 | |
Houkime | 4cfe0515ea | |
def | 16e0bd56ce | |
Inex Code | 9cf4d46c50 | |
Houkime | d47368cbe9 | |
Houkime | 063dfafc19 | |
Houkime | 5c86706f4b | |
Houkime | 14c4ae26ab | |
Inex Code | f0132266e9 | |
Houkime | 870d2c408d | |
Houkime | f51e378ff0 | |
Houkime | 7acbba9960 | |
Houkime | 106a083ca2 | |
Houkime | b6eeec23cc | |
Houkime | c21b6cb071 | |
Houkime | 144f95fb34 | |
Inex Code | 60919b88b9 | |
Inex Code | 57794c9535 | |
Houkime | 2e9cdf15ab | |
Inex Code | 3ce71b0993 | |
Houkime | d6ef01c0c7 | |
Houkime | 244851c7cc | |
Houkime | 5afa2338ca | |
def | e130d37033 | |
Houkime | f7b7e5a0be | |
Inex Code | 9ee0240bbd | |
Houkime | 41e5f89b7b | |
Houkime | 07af2e59be | |
Houkime | a723311b36 | |
Houkime | 589093b853 | |
Houkime | 6c6f45781c | |
Houkime | 2d7bc0f154 | |
Houkime | 5efa85f877 | |
Houkime | f4a7986cfa | |
Houkime | 2ec0548c09 | |
Houkime | dd15e0ab65 | |
Houkime | 9540e26ce1 | |
Houkime | 7d58eb3d92 | |
Houkime | decb98afe2 | |
Houkime | b0c26b876a | |
Inex Code | 27255cb533 | |
Inex Code | 83736e1e8f | |
Inex Code | 19a4ec5377 | |
Inex Code | 8cdacb73dd |
@@ -5,12 +5,16 @@ name: default
 steps:
 - name: Run Tests and Generate Coverage Report
   commands:
+    - kill $(ps aux | grep '[r]edis-server 127.0.0.1:6389' | awk '{print $2}')
+    - redis-server --bind 127.0.0.1 --port 6389 >/dev/null &
     - coverage run -m pytest -q
     - coverage xml
     - sonar-scanner -Dsonar.projectKey=SelfPrivacy-REST-API -Dsonar.sources=. -Dsonar.host.url=http://analyzer.lan:9000 -Dsonar.login="$SONARQUBE_TOKEN"
   environment:
     SONARQUBE_TOKEN:
       from_secret: SONARQUBE_TOKEN
+    USE_REDIS_PORT: 6389

 - name: Run Bandit Checks
   commands:
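Note: the pipeline kills any stale Redis instance listening on 127.0.0.1:6389, starts a fresh one, and exports `USE_REDIS_PORT` so the test run connects to it instead of the default port. The `RedisPool` helper the API uses is not part of this diff; a minimal pool honoring that variable might look like the sketch below (an illustration under that assumption, not the actual `selfprivacy_api.utils.redis_pool` code):

```python
import os

import redis


class RedisPool:
    """A sketch of a Redis connection pool that honors USE_REDIS_PORT.

    Hypothetical illustration; the real selfprivacy_api.utils.redis_pool may differ.
    """

    def __init__(self):
        # CI exports USE_REDIS_PORT=6389; otherwise fall back to the default port.
        port = int(os.environ.get("USE_REDIS_PORT", 6379))
        self._pool = redis.ConnectionPool(
            host="127.0.0.1",
            port=port,
            decode_responses=True,  # return str instead of bytes
        )

    def get_connection(self) -> redis.Redis:
        return redis.Redis(connection_pool=self._pool)
```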
@@ -0,0 +1,88 @@

# SelfPrivacy API contributors guide

Instructions for [VScode](https://code.visualstudio.com) or [VScodium](https://github.com/VSCodium/vscodium) on a Unix-like platform.

1. **To get started, create an account for yourself on the** [**SelfPrivacy Gitea**](https://git.selfprivacy.org/user/sign_up). Then fork
the [repository](https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api) and clone it to your local computer:

   ```git clone https://git.selfprivacy.org/your_user_name/selfprivacy-rest-api```

2. **Install Nix**

   ```sh <(curl -L https://nixos.org/nix/install)```

   For detailed installation information, please review and follow: [link](https://nixos.org/manual/nix/stable/installation/installing-binary.html#installing-a-binary-distribution).

3. **Change directory to the cloned repository and start a nix shell:**

   ```cd selfprivacy-rest-api && nix-shell```

   Nix will install all of the packages necessary for development; all further actions take place inside this nix-shell.

4. **Install these plugins for VScode/VScodium**

   Required: ```ms-python.python```, ```ms-python.vscode-pylance```

   Optional, but highly recommended: ```ms-python.black-formatter```, ```bbenoist.Nix```, ```ryanluker.vscode-coverage-gutters```

5. **Set the path to the Python interpreter from the nix store.** To do this, execute the command:

   ```whereis python```

   Copy the path that starts with ```/nix/store/``` and ends with ```env/bin/python```:

   ```/nix/store/???-python3-3.9.??-env/bin/python```

   Click on the Python version selector in the lower right corner, and replace the project's interpreter path with the one you copied from the terminal.

6. **Congratulations :) Now you can develop new changes and test the project locally in a Nix environment.**

## What do you need to know before starting development work?

- New development targets [GraphQL](https://graphql.org); the REST API is no longer extended, although its existing functionality still works over REST.

## What to do after making changes to the repository?

**Run unit tests** using ```pytest .```
Make sure that all tests pass successfully and the API works correctly. For convenience, you can use the built-in VScode interface. If you need a starting point, a minimal test is sketched below.
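A minimal, self-contained pytest example (hypothetical; not a test from this repository):

```python
# test_example.py - discovered and run by `pytest .` inside the nix-shell
def test_addition():
    assert 2 + 2 == 4
```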
To check the code coverage percentage, execute:

```coverage run -m pytest && coverage xml && coverage report```

Next, use the recommended extension ```ryanluker.vscode-coverage-gutters```, navigate to one of the test files, and click the "watch" button on the bottom panel of VScode.

**Format your code.** We use [black](https://pypi.org/project/black/) formatting; enter
```black .``` to automatically format files, or use the recommended extension.

**And please remember that we follow the** [**Conventional Commits**](https://www.conventionalcommits.org/en/v1.0.0/) **naming convention**; follow the link for more information.

Please request a review from at least one of the other maintainers. If you are not sure who to request, request a review from the SelfPrivacy/Devs team.

## Helpful links!

**SelfPrivacy Contributor chat :3**

- [**Telegram:** @selfprivacy_dev](https://t.me/selfprivacy_dev)
- [**Matrix:** #dev:selfprivacy.org](https://matrix.to/#/#dev:selfprivacy.org)

**Helpful material to review:**

- [GraphQL Query Language Documentation](https://graphql.org/)
- [Strawberry documentation (a Python library for working with GraphQL)](https://strawberry.rocks/docs/)
- [Nix Documentation](https://nixos.org/guides/ad-hoc-developer-environments.html)

### Track your time

If you are working on a task, please track your time and add it to the commit message. For example:

```
feat: add new feature

- did some work
- did some more work

fixes #4, spent @1h30m
```

[Timewarrior](https://timewarrior.net/) is a good tool for tracking time.
@@ -1,21 +1,23 @@
-"""App tokens actions"""
-from datetime import datetime
+"""
+App tokens actions.
+The only actions on tokens that are accessible from APIs
+"""
+from datetime import datetime, timezone
 from typing import Optional
 from pydantic import BaseModel
 from mnemonic import Mnemonic

-from selfprivacy_api.utils.auth import (
-    delete_token,
-    generate_recovery_token,
-    get_recovery_token_status,
-    get_tokens_info,
-    is_recovery_token_exists,
-    is_recovery_token_valid,
-    is_token_name_exists,
-    is_token_name_pair_valid,
-    refresh_token,
-    get_token_name,
+from selfprivacy_api.repositories.tokens.redis_tokens_repository import (
+    RedisTokensRepository,
 )
+from selfprivacy_api.repositories.tokens.exceptions import (
+    TokenNotFound,
+    RecoveryKeyNotFound,
+    InvalidMnemonic,
+    NewDeviceKeyNotFound,
+)

+TOKEN_REPO = RedisTokensRepository()


 class TokenInfoWithIsCaller(BaseModel):
@@ -25,21 +27,32 @@ class TokenInfoWithIsCaller(BaseModel):
     date: datetime
     is_caller: bool


+def _naive(date_time: datetime) -> datetime:
+    if date_time is None:
+        return None
+    if date_time.tzinfo is not None:
+        # astimezone() returns a new object, so the result must be assigned
+        date_time = date_time.astimezone(timezone.utc)
+    return date_time.replace(tzinfo=None)
+
+
 def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCaller]:
     """Get the tokens info"""
-    caller_name = get_token_name(caller_token)
-    tokens = get_tokens_info()
+    caller_name = TOKEN_REPO.get_token_by_token_string(caller_token).device_name
+    tokens = TOKEN_REPO.get_tokens()
     return [
         TokenInfoWithIsCaller(
-            name=token.name,
-            date=token.date,
-            is_caller=token.name == caller_name,
+            name=token.device_name,
+            date=token.created_at,
+            is_caller=token.device_name == caller_name,
         )
         for token in tokens
     ]


+def is_token_valid(token) -> bool:
+    """Check if token is valid"""
+    return TOKEN_REPO.is_token_valid(token)
+
+
 class NotFoundException(Exception):
     """Not found exception"""
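The `_naive` helper exists because the API surface still expects timezone-naive datetimes, while the new token models store timezone-aware UTC values. A standalone illustration of why the normalization matters (plain stdlib, independent of the code above):

```python
from datetime import datetime, timezone

aware = datetime(2022, 12, 1, 12, 0, tzinfo=timezone.utc)

# Stripping tzinfo keeps the clock reading but drops the offset information.
naive = aware.replace(tzinfo=None)
assert naive.tzinfo is None

# Comparing naive and aware datetimes raises TypeError, hence the conversion step.
try:
    naive < aware
except TypeError:
    print("cannot compare naive and aware datetimes")
```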
@@ -50,19 +63,22 @@ class CannotDeleteCallerException(Exception):

 def delete_api_token(caller_token: str, token_name: str) -> None:
     """Delete the token"""
-    if is_token_name_pair_valid(token_name, caller_token):
+    if TOKEN_REPO.is_token_name_pair_valid(token_name, caller_token):
         raise CannotDeleteCallerException("Cannot delete caller's token")
-    if not is_token_name_exists(token_name):
+    if not TOKEN_REPO.is_token_name_exists(token_name):
         raise NotFoundException("Token not found")
-    delete_token(token_name)
+    token = TOKEN_REPO.get_token_by_name(token_name)
+    TOKEN_REPO.delete_token(token)


 def refresh_api_token(caller_token: str) -> str:
     """Refresh the token"""
-    new_token = refresh_token(caller_token)
-    if new_token is None:
+    try:
+        old_token = TOKEN_REPO.get_token_by_token_string(caller_token)
+        new_token = TOKEN_REPO.refresh_token(old_token)
+    except TokenNotFound:
         raise NotFoundException("Token not found")
-    return new_token
+    return new_token.token
@@ -75,20 +91,21 @@ class RecoveryTokenStatus(BaseModel):
     uses_left: Optional[int] = None


 def get_api_recovery_token_status() -> RecoveryTokenStatus:
     """Get the recovery token status"""
-    if not is_recovery_token_exists():
+    token = TOKEN_REPO.get_recovery_key()
+    if token is None:
         return RecoveryTokenStatus(exists=False, valid=False)
-    status = get_recovery_token_status()
-    if status is None:
-        return RecoveryTokenStatus(exists=False, valid=False)
-    is_valid = is_recovery_token_valid()
+    is_valid = TOKEN_REPO.is_recovery_key_valid()
     return RecoveryTokenStatus(
         exists=True,
         valid=is_valid,
-        date=status["date"],
-        expiration=status["expiration"],
-        uses_left=status["uses_left"],
+        date=_naive(token.created_at),
+        expiration=_naive(token.expires_at),
+        uses_left=token.uses_left,
     )
@@ -112,5 +129,46 @@ def get_new_api_recovery_key(
     if uses_left <= 0:
         raise InvalidUsesLeft("Uses must be greater than 0")

-    key = generate_recovery_token(expiration_date, uses_left)
-    return key
+    key = TOKEN_REPO.create_recovery_key(expiration_date, uses_left)
+    mnemonic_phrase = Mnemonic(language="english").to_mnemonic(bytes.fromhex(key.key))
+    return mnemonic_phrase
+
+
+def use_mnemonic_recovery_token(mnemonic_phrase, name):
+    """Use the recovery token by converting the mnemonic word list to a byte array.
+    If the recovery token is invalid itself, return None.
+    If the binary representation of the phrase does not match
+    the byte array of the recovery token, return None.
+    If the mnemonic phrase is valid, generate a device token and return it.
+    Subtract 1 from uses_left if it exists.
+    mnemonic_phrase is a string representation of the mnemonic word list.
+    """
+    try:
+        token = TOKEN_REPO.use_mnemonic_recovery_key(mnemonic_phrase, name)
+        return token.token
+    except (RecoveryKeyNotFound, InvalidMnemonic):
+        return None
+
+
+def delete_new_device_auth_token() -> None:
+    TOKEN_REPO.delete_new_device_key()
+
+
+def get_new_device_auth_token() -> str:
+    """Generate and store a new device auth token which is valid for 10 minutes
+    and return a mnemonic phrase representation
+    """
+    key = TOKEN_REPO.get_new_device_key()
+    return Mnemonic(language="english").to_mnemonic(bytes.fromhex(key.key))
+
+
+def use_new_device_auth_token(mnemonic_phrase, name) -> Optional[str]:
+    """Use the new device auth token by converting the mnemonic string to a byte array.
+    If the mnemonic phrase is valid, generate a device token and return it.
+    The new device auth token must be deleted afterwards.
+    """
+    try:
+        token = TOKEN_REPO.use_mnemonic_new_device_key(mnemonic_phrase, name)
+        return token.token
+    except (NewDeviceKeyNotFound, InvalidMnemonic):
+        return None
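Recovery and new-device keys are random byte strings stored as hex; the `mnemonic` library (a BIP-39 implementation) renders them as a word list that a human can re-enter on another device. A standalone round-trip demonstration of the encoding used above:

```python
import secrets

from mnemonic import Mnemonic

mnemo = Mnemonic(language="english")

# 24 random bytes stored as hex -- the same shape RecoveryKey.generate() produces
hex_key = secrets.token_bytes(24).hex()

# hex -> 18-word phrase, shown to the user once
phrase = mnemo.to_mnemonic(bytes.fromhex(hex_key))

# phrase -> bytes again; this is how _assert_mnemonic compares the two
assert mnemo.check(phrase)
assert mnemo.to_entropy(phrase) == bytes.fromhex(hex_key)
```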
@@ -2,7 +2,7 @@ from fastapi import Depends, HTTPException, status
 from fastapi.security import APIKeyHeader
 from pydantic import BaseModel

-from selfprivacy_api.utils.auth import is_token_valid
+from selfprivacy_api.actions.api_tokens import is_token_valid


 class TokenHeader(BaseModel):

@@ -27,4 +27,4 @@ async def get_token_header(

 def get_api_version() -> str:
     """Get API version"""
-    return "2.0.9"
+    return "2.1.0"
@@ -4,7 +4,7 @@ import typing
 from strawberry.permission import BasePermission
 from strawberry.types import Info

-from selfprivacy_api.utils.auth import is_token_valid
+from selfprivacy_api.actions.api_tokens import is_token_valid


 class IsAuthenticated(BasePermission):
@@ -43,7 +43,7 @@ def job_to_api_job(job: Job) -> ApiJob:

 def get_api_job_by_id(job_id: str) -> typing.Optional[ApiJob]:
     """Get a job for GraphQL by its ID."""
-    job = Jobs.get_instance().get_job(job_id)
+    job = Jobs.get_job(job_id)
     if job is None:
         return None
     return job_to_api_job(job)
@@ -11,6 +11,11 @@ from selfprivacy_api.actions.api_tokens import (
     NotFoundException,
     delete_api_token,
     get_new_api_recovery_key,
+    use_mnemonic_recovery_token,
+    refresh_api_token,
+    delete_new_device_auth_token,
+    get_new_device_auth_token,
+    use_new_device_auth_token,
 )
 from selfprivacy_api.graphql import IsAuthenticated
 from selfprivacy_api.graphql.mutations.mutation_interface import (

@@ -18,14 +23,6 @@ from selfprivacy_api.graphql.mutations.mutation_interface import (
     MutationReturnInterface,
 )

-from selfprivacy_api.utils.auth import (
-    delete_new_device_auth_token,
-    get_new_device_auth_token,
-    refresh_token,
-    use_mnemonic_recoverery_token,
-    use_new_device_auth_token,
-)
-

 @strawberry.type
 class ApiKeyMutationReturn(MutationReturnInterface):
@@ -98,50 +95,53 @@ class ApiMutations:
         self, input: UseRecoveryKeyInput
     ) -> DeviceApiTokenMutationReturn:
         """Use recovery key"""
-        token = use_mnemonic_recoverery_token(input.key, input.deviceName)
-        if token is None:
+        token = use_mnemonic_recovery_token(input.key, input.deviceName)
+        if token is not None:
+            return DeviceApiTokenMutationReturn(
+                success=True,
+                message="Recovery key used",
+                code=200,
+                token=token,
+            )
+        else:
             return DeviceApiTokenMutationReturn(
                 success=False,
                 message="Recovery key not found",
                 code=404,
                 token=None,
             )
-        return DeviceApiTokenMutationReturn(
-            success=True,
-            message="Recovery key used",
-            code=200,
-            token=token,
-        )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def refresh_device_api_token(self, info: Info) -> DeviceApiTokenMutationReturn:
         """Refresh device api token"""
-        token = (
+        token_string = (
             info.context["request"]
             .headers.get("Authorization", "")
             .replace("Bearer ", "")
         )
-        if token is None:
+        if token_string is None:
             return DeviceApiTokenMutationReturn(
                 success=False,
                 message="Token not found",
                 code=404,
                 token=None,
             )
-        new_token = refresh_token(token)
-        if new_token is None:
+
+        try:
+            new_token = refresh_api_token(token_string)
+            return DeviceApiTokenMutationReturn(
+                success=True,
+                message="Token refreshed",
+                code=200,
+                token=new_token,
+            )
+        except NotFoundException:
             return DeviceApiTokenMutationReturn(
                 success=False,
                 message="Token not found",
                 code=404,
                 token=None,
             )
-        return DeviceApiTokenMutationReturn(
-            success=True,
-            message="Token refreshed",
-            code=200,
-            token=new_token,
-        )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def delete_device_api_token(self, device: str, info: Info) -> GenericMutationReturn:
@@ -14,7 +14,7 @@ class JobMutations:
     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def remove_job(self, job_id: str) -> GenericMutationReturn:
         """Remove a job from the queue"""
-        result = Jobs.get_instance().remove_by_uid(job_id)
+        result = Jobs.remove_by_uid(job_id)
         if result:
             return GenericMutationReturn(
                 success=True,
@@ -4,16 +4,12 @@ import datetime
 import typing
 import strawberry
 from strawberry.types import Info
-from selfprivacy_api.actions.api_tokens import get_api_tokens_with_caller_flag
-from selfprivacy_api.graphql import IsAuthenticated
-from selfprivacy_api.utils import parse_date
-from selfprivacy_api.dependencies import get_api_version as get_api_version_dependency
-
-from selfprivacy_api.utils.auth import (
-    get_recovery_token_status,
-    is_recovery_token_exists,
-    is_recovery_token_valid,
+from selfprivacy_api.actions.api_tokens import (
+    get_api_tokens_with_caller_flag,
+    get_api_recovery_token_status,
 )
+from selfprivacy_api.graphql import IsAuthenticated
+from selfprivacy_api.dependencies import get_api_version as get_api_version_dependency


 def get_api_version() -> str:
@@ -43,16 +39,8 @@ class ApiRecoveryKeyStatus:

 def get_recovery_key_status() -> ApiRecoveryKeyStatus:
     """Get recovery key status"""
-    if not is_recovery_token_exists():
-        return ApiRecoveryKeyStatus(
-            exists=False,
-            valid=False,
-            creation_date=None,
-            expiration_date=None,
-            uses_left=None,
-        )
-    status = get_recovery_token_status()
-    if status is None:
+    status = get_api_recovery_token_status()
+    if status is None or not status.exists:
         return ApiRecoveryKeyStatus(
             exists=False,
             valid=False,
@@ -62,12 +50,10 @@ def get_recovery_key_status() -> ApiRecoveryKeyStatus:
         )
     return ApiRecoveryKeyStatus(
         exists=True,
-        valid=is_recovery_token_valid(),
-        creation_date=parse_date(status["date"]),
-        expiration_date=parse_date(status["expiration"])
-        if status["expiration"] is not None
-        else None,
-        uses_left=status["uses_left"] if status["uses_left"] is not None else None,
+        valid=status.valid,
+        creation_date=status.date,
+        expiration_date=status.expiration,
+        uses_left=status.uses_left,
     )
@@ -16,9 +16,9 @@ class Job:
     @strawberry.field
     def get_jobs(self) -> typing.List[ApiJob]:

-        Jobs.get_instance().get_jobs()
+        Jobs.get_jobs()

-        return [job_to_api_job(job) for job in Jobs.get_instance().get_jobs()]
+        return [job_to_api_job(job) for job in Jobs.get_jobs()]

     @strawberry.field
     def get_job(self, job_id: str) -> typing.Optional[ApiJob]:
@@ -11,3 +11,9 @@ class DnsProvider(Enum):
 @strawberry.enum
 class ServerProvider(Enum):
     HETZNER = "HETZNER"
+    DIGITALOCEAN = "DIGITALOCEAN"
+
+
+@strawberry.enum
+class BackupProvider(Enum):
+    BACKBLAZE = "BACKBLAZE"
@@ -44,7 +44,7 @@ def get_system_domain_info() -> SystemDomainInfo:
     return SystemDomainInfo(
         domain=user_data["domain"],
         hostname=user_data["hostname"],
-        provider=DnsProvider.CLOUDFLARE,
+        provider=user_data["dns"]["provider"],
     )
@@ -133,7 +133,11 @@ class SystemProviderInfo:

 def get_system_provider_info() -> SystemProviderInfo:
     """Get system provider info"""
-    return SystemProviderInfo(provider=ServerProvider.HETZNER, id="UNKNOWN")
+    with ReadUserData() as user_data:
+        return SystemProviderInfo(
+            provider=user_data["server"]["provider"],
+            id="UNKNOWN",
+        )


 @strawberry.type
@@ -17,16 +17,14 @@ A job is a dictionary with the following keys:
 import typing
 import datetime
 from uuid import UUID
-import asyncio
-import json
-import os
-import time
 import uuid
 from enum import Enum

 from pydantic import BaseModel

 from selfprivacy_api.utils import ReadUserData, UserDataFiles, WriteUserData
+from selfprivacy_api.utils.redis_pool import RedisPool

+JOB_EXPIRATION_SECONDS = 10 * 24 * 60 * 60  # ten days


 class JobStatus(Enum):
@@ -64,36 +62,14 @@ class Jobs:
     Jobs class.
     """

-    __instance = None
-
-    @staticmethod
-    def get_instance():
-        """
-        Singleton method.
-        """
-        if Jobs.__instance is None:
-            Jobs()
-            if Jobs.__instance is None:
-                raise Exception("Couldn't init Jobs singleton!")
-            return Jobs.__instance
-        return Jobs.__instance
-
-    def __init__(self):
-        """
-        Initialize the jobs list.
-        """
-        if Jobs.__instance is not None:
-            raise Exception("This class is a singleton!")
-        else:
-            Jobs.__instance = self
-
     @staticmethod
     def reset() -> None:
         """
         Reset the jobs list.
         """
-        with WriteUserData(UserDataFiles.JOBS) as user_data:
-            user_data["jobs"] = []
+        jobs = Jobs.get_jobs()
+        for job in jobs:
+            Jobs.remove(job)

     @staticmethod
     def add(
@@ -121,32 +97,27 @@ class Jobs:
             error=None,
             result=None,
         )
-        with WriteUserData(UserDataFiles.JOBS) as user_data:
-            try:
-                if "jobs" not in user_data:
-                    user_data["jobs"] = []
-                user_data["jobs"].append(json.loads(job.json()))
-            except json.decoder.JSONDecodeError:
-                user_data["jobs"] = [json.loads(job.json())]
+        redis = RedisPool().get_connection()
+        _store_job_as_hash(redis, _redis_key_from_uuid(job.uid), job)
         return job

-    def remove(self, job: Job) -> None:
+    @staticmethod
+    def remove(job: Job) -> None:
         """
         Remove a job from the jobs list.
         """
-        self.remove_by_uid(str(job.uid))
+        Jobs.remove_by_uid(str(job.uid))

-    def remove_by_uid(self, job_uuid: str) -> bool:
+    @staticmethod
+    def remove_by_uid(job_uuid: str) -> bool:
         """
         Remove a job from the jobs list.
         """
-        with WriteUserData(UserDataFiles.JOBS) as user_data:
-            if "jobs" not in user_data:
-                user_data["jobs"] = []
-            for i, j in enumerate(user_data["jobs"]):
-                if j["uid"] == job_uuid:
-                    del user_data["jobs"][i]
-                    return True
+        redis = RedisPool().get_connection()
+        key = _redis_key_from_uuid(job_uuid)
+        if redis.exists(key):
+            redis.delete(key)
+            return True
         return False

     @staticmethod
@@ -178,13 +149,12 @@ class Jobs:
         if status in (JobStatus.FINISHED, JobStatus.ERROR):
             job.finished_at = datetime.datetime.now()

-        with WriteUserData(UserDataFiles.JOBS) as user_data:
-            if "jobs" not in user_data:
-                user_data["jobs"] = []
-            for i, j in enumerate(user_data["jobs"]):
-                if j["uid"] == str(job.uid):
-                    user_data["jobs"][i] = json.loads(job.json())
-                    break
+        redis = RedisPool().get_connection()
+        key = _redis_key_from_uuid(job.uid)
+        if redis.exists(key):
+            _store_job_as_hash(redis, key, job)
+            if status in (JobStatus.FINISHED, JobStatus.ERROR):
+                redis.expire(key, JOB_EXPIRATION_SECONDS)

         return job
@@ -193,12 +163,10 @@ class Jobs:
         """
         Get a job from the jobs list.
         """
-        with ReadUserData(UserDataFiles.JOBS) as user_data:
-            if "jobs" not in user_data:
-                user_data["jobs"] = []
-            for job in user_data["jobs"]:
-                if job["uid"] == uid:
-                    return Job(**job)
+        redis = RedisPool().get_connection()
+        key = _redis_key_from_uuid(uid)
+        if redis.exists(key):
+            return _job_from_hash(redis, key)
         return None

     @staticmethod
@@ -206,23 +174,54 @@ class Jobs:
         """
         Get the jobs list.
         """
-        with ReadUserData(UserDataFiles.JOBS) as user_data:
-            try:
-                if "jobs" not in user_data:
-                    user_data["jobs"] = []
-                return [Job(**job) for job in user_data["jobs"]]
-            except json.decoder.JSONDecodeError:
-                return []
+        redis = RedisPool().get_connection()
+        job_keys = redis.keys("jobs:*")
+        jobs = []
+        for job_key in job_keys:
+            job = _job_from_hash(redis, job_key)
+            if job is not None:
+                jobs.append(job)
+        return jobs

     @staticmethod
     def is_busy() -> bool:
         """
         Check if there is a job running.
         """
-        with ReadUserData(UserDataFiles.JOBS) as user_data:
-            if "jobs" not in user_data:
-                user_data["jobs"] = []
-            for job in user_data["jobs"]:
-                if job["status"] == JobStatus.RUNNING.value:
-                    return True
+        for job in Jobs.get_jobs():
+            if job.status == JobStatus.RUNNING:
+                return True
         return False


+def _redis_key_from_uuid(uuid_string):
+    return "jobs:" + str(uuid_string)
+
+
+def _store_job_as_hash(redis, redis_key, model):
+    for key, value in model.dict().items():
+        if isinstance(value, uuid.UUID):
+            value = str(value)
+        if isinstance(value, datetime.datetime):
+            value = value.isoformat()
+        if isinstance(value, JobStatus):
+            value = value.value
+        redis.hset(redis_key, key, str(value))
+
+
+def _job_from_hash(redis, redis_key):
+    if redis.exists(redis_key):
+        job_dict = redis.hgetall(redis_key)
+        for date in [
+            "created_at",
+            "updated_at",
+            "finished_at",
+        ]:
+            if job_dict[date] != "None":
+                job_dict[date] = datetime.datetime.fromisoformat(job_dict[date])
+        for key in job_dict.keys():
+            if job_dict[key] == "None":
+                job_dict[key] = None
+
+        return Job(**job_dict)
+    return None
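The helpers above flatten a pydantic model into a Redis hash, stringifying UUIDs, datetimes and enums on the way in and re-parsing them on the way out. A standalone round-trip sketch of the same idea (hypothetical field names; assumes a local Redis and `decode_responses=True` so values come back as strings):

```python
import datetime
import uuid
from enum import Enum

import redis
from pydantic import BaseModel


class Status(Enum):
    CREATED = "CREATED"
    RUNNING = "RUNNING"


class Record(BaseModel):  # a hypothetical stand-in for the Job model
    uid: uuid.UUID
    status: Status
    created_at: datetime.datetime


r = redis.Redis(decode_responses=True)  # assumes Redis on the default local port

rec = Record(uid=uuid.uuid4(), status=Status.RUNNING,
             created_at=datetime.datetime.now())

# store: every value becomes a string field of one hash
for key, value in rec.dict().items():
    if isinstance(value, uuid.UUID):
        value = str(value)
    if isinstance(value, datetime.datetime):
        value = value.isoformat()
    if isinstance(value, Status):
        value = value.value
    r.hset("records:" + str(rec.uid), key, str(value))

# load: pydantic re-parses the strings back into typed fields
loaded = Record(**r.hgetall("records:" + str(rec.uid)))
assert loaded == rec
```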
@@ -5,7 +5,7 @@ from selfprivacy_api.jobs import JobStatus, Jobs

 @huey.task()
 def test_job():
-    job = Jobs.get_instance().add(
+    job = Jobs.add(
         type_id="test",
         name="Test job",
         description="This is a test job.",
@@ -14,42 +14,42 @@ def test_job():
         progress=0,
     )
     time.sleep(5)
-    Jobs.get_instance().update(
+    Jobs.update(
         job=job,
         status=JobStatus.RUNNING,
         status_text="Performing pre-move checks...",
         progress=5,
     )
     time.sleep(5)
-    Jobs.get_instance().update(
+    Jobs.update(
         job=job,
         status=JobStatus.RUNNING,
         status_text="Performing pre-move checks...",
         progress=10,
     )
     time.sleep(5)
-    Jobs.get_instance().update(
+    Jobs.update(
         job=job,
         status=JobStatus.RUNNING,
         status_text="Performing pre-move checks...",
         progress=15,
     )
     time.sleep(5)
-    Jobs.get_instance().update(
+    Jobs.update(
         job=job,
         status=JobStatus.RUNNING,
         status_text="Performing pre-move checks...",
         progress=20,
     )
     time.sleep(5)
-    Jobs.get_instance().update(
+    Jobs.update(
         job=job,
         status=JobStatus.RUNNING,
         status_text="Performing pre-move checks...",
         progress=25,
     )
     time.sleep(5)
-    Jobs.get_instance().update(
+    Jobs.update(
         job=job,
         status=JobStatus.FINISHED,
         status_text="Job finished.",
@@ -8,7 +8,9 @@ at api.skippedMigrations in userdata.json and populating it
 with IDs of the migrations to skip.
 Adding DISABLE_ALL to that array disables the migrations module entirely.
 """
-from selfprivacy_api.migrations.check_for_failed_binds_migration import CheckForFailedBindsMigration
+from selfprivacy_api.migrations.check_for_failed_binds_migration import (
+    CheckForFailedBindsMigration,
+)
 from selfprivacy_api.utils import ReadUserData
 from selfprivacy_api.migrations.fix_nixos_config_branch import FixNixosConfigBranch
 from selfprivacy_api.migrations.create_tokens_json import CreateTokensJson

@@ -16,6 +18,8 @@ from selfprivacy_api.migrations.migrate_to_selfprivacy_channel import (
     MigrateToSelfprivacyChannel,
 )
 from selfprivacy_api.migrations.mount_volume import MountVolume
+from selfprivacy_api.migrations.providers import CreateProviderFields
+from selfprivacy_api.migrations.redis_tokens import LoadTokensToRedis

 migrations = [
     FixNixosConfigBranch(),

@@ -23,6 +27,8 @@ migrations = [
     MigrateToSelfprivacyChannel(),
     MountVolume(),
     CheckForFailedBindsMigration(),
+    CreateProviderFields(),
+    LoadTokensToRedis(),
 ]
@@ -15,7 +15,7 @@ class CheckForFailedBindsMigration(Migration):

     def is_migration_needed(self):
         try:
-            jobs = Jobs.get_instance().get_jobs()
+            jobs = Jobs.get_jobs()
             # If there is a job with type_id "migrations.migrate_to_binds" and status is not "FINISHED",
             # then migration is needed and job is deleted
             for job in jobs:

@@ -33,13 +33,13 @@ class CheckForFailedBindsMigration(Migration):
         # Get info about existing volumes
         # Write info about volumes to userdata.json
         try:
-            jobs = Jobs.get_instance().get_jobs()
+            jobs = Jobs.get_jobs()
             for job in jobs:
                 if (
                     job.type_id == "migrations.migrate_to_binds"
                     and job.status != JobStatus.FINISHED
                 ):
-                    Jobs.get_instance().remove(job)
+                    Jobs.remove(job)
             with WriteUserData() as userdata:
                 userdata["useBinds"] = False
             print("Done")
@@ -0,0 +1,43 @@
from selfprivacy_api.migrations.migration import Migration
from selfprivacy_api.utils import ReadUserData, WriteUserData


class CreateProviderFields(Migration):
    """Unhardcode providers"""

    def get_migration_name(self):
        return "create_provider_fields"

    def get_migration_description(self):
        return "Add DNS, backup and server provider fields to enable user to choose between different clouds and to make the deployment adapt to these preferences."

    def is_migration_needed(self):
        try:
            with ReadUserData() as userdata:
                return "dns" not in userdata
        except Exception as e:
            print(e)
            return False

    def migrate(self):
        # Write info about providers to userdata.json
        try:
            with WriteUserData() as userdata:
                userdata["dns"] = {
                    "provider": "CLOUDFLARE",
                    "apiKey": userdata["cloudflare"]["apiKey"],
                }
                userdata["server"] = {
                    "provider": "HETZNER",
                }
                userdata["backup"] = {
                    "provider": "BACKBLAZE",
                    "accountId": userdata["backblaze"]["accountId"],
                    "accountKey": userdata["backblaze"]["accountKey"],
                    "bucket": userdata["backblaze"]["bucket"],
                }

            print("Done")
        except Exception as e:
            print(e)
            print("Error migrating provider fields")
@@ -0,0 +1,48 @@
from selfprivacy_api.migrations.migration import Migration

from selfprivacy_api.repositories.tokens.json_tokens_repository import (
    JsonTokensRepository,
)
from selfprivacy_api.repositories.tokens.redis_tokens_repository import (
    RedisTokensRepository,
)
from selfprivacy_api.repositories.tokens.abstract_tokens_repository import (
    AbstractTokensRepository,
)


class LoadTokensToRedis(Migration):
    """Load Json tokens into Redis"""

    def get_migration_name(self):
        return "load_tokens_to_redis"

    def get_migration_description(self):
        return "Loads access tokens and recovery keys from legacy json file into redis token storage"

    def is_repo_empty(self, repo: AbstractTokensRepository) -> bool:
        if repo.get_tokens() != []:
            return False
        if repo.get_recovery_key() is not None:
            return False
        return True

    def is_migration_needed(self):
        try:
            if not self.is_repo_empty(JsonTokensRepository()) and self.is_repo_empty(
                RedisTokensRepository()
            ):
                return True
        except Exception as e:
            print(e)
            return False

    def migrate(self):
        # Copy tokens and recovery keys from the legacy json file into redis
        try:
            RedisTokensRepository().clone(JsonTokensRepository())

            print("Done")
        except Exception as e:
            print(e)
            print("Error migrating access tokens from json to redis")
@@ -0,0 +1,48 @@
"""
New device key used to obtain access token.
"""
from datetime import datetime, timedelta, timezone
import secrets
from pydantic import BaseModel
from mnemonic import Mnemonic

from selfprivacy_api.models.tokens.time import is_past


class NewDeviceKey(BaseModel):
    """
    New device key used to obtain access token.

    A new device key has a key string, date of creation and date of expiration.
    """

    key: str
    created_at: datetime
    expires_at: datetime

    def is_valid(self) -> bool:
        """
        Check if the new device key is valid.
        """
        if is_past(self.expires_at):
            return False
        return True

    def as_mnemonic(self) -> str:
        """
        Get the key as a mnemonic.
        """
        return Mnemonic(language="english").to_mnemonic(bytes.fromhex(self.key))

    @staticmethod
    def generate() -> "NewDeviceKey":
        """
        Factory to generate a random key.
        """
        creation_date = datetime.now(timezone.utc)
        key = secrets.token_bytes(16).hex()
        return NewDeviceKey(
            key=key,
            created_at=creation_date,
            expires_at=creation_date + timedelta(minutes=10),
        )
@@ -0,0 +1,60 @@
"""
Recovery key used to obtain access token.

Recovery key has a token string, date of creation, optional date of expiration and optional count of uses left.
"""
from datetime import datetime, timezone
import secrets
from typing import Optional
from pydantic import BaseModel
from mnemonic import Mnemonic

from selfprivacy_api.models.tokens.time import is_past, ensure_timezone


class RecoveryKey(BaseModel):
    """
    Recovery key used to obtain access token.

    Recovery key has a key string, date of creation, optional date of expiration and optional count of uses left.
    """

    key: str
    created_at: datetime
    expires_at: Optional[datetime]
    uses_left: Optional[int]

    def is_valid(self) -> bool:
        """
        Check if the recovery key is valid.
        """
        if self.expires_at is not None and is_past(self.expires_at):
            return False
        if self.uses_left is not None and self.uses_left <= 0:
            return False
        return True

    def as_mnemonic(self) -> str:
        """
        Get the recovery key as a mnemonic.
        """
        return Mnemonic(language="english").to_mnemonic(bytes.fromhex(self.key))

    @staticmethod
    def generate(
        expiration: Optional[datetime],
        uses_left: Optional[int],
    ) -> "RecoveryKey":
        """
        Factory to generate a random token.
        """
        creation_date = datetime.now(timezone.utc)
        if expiration is not None:
            expiration = ensure_timezone(expiration)
        key = secrets.token_bytes(24).hex()
        return RecoveryKey(
            key=key,
            created_at=creation_date,
            expires_at=expiration,
            uses_left=uses_left,
        )
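A short usage sketch of this model from a Python shell (in-repo imports; exercises only the code shown above):

```python
from datetime import datetime, timedelta, timezone

from selfprivacy_api.models.tokens.recovery_key import RecoveryKey

# A key limited to 3 uses that expires in a week
key = RecoveryKey.generate(
    expiration=datetime.now(timezone.utc) + timedelta(days=7),
    uses_left=3,
)
assert key.is_valid()

# An exhausted key stops validating even before it expires
spent = key.copy(update={"uses_left": 0})
assert not spent.is_valid()

print(key.as_mnemonic())  # 18 words for the user to write down
```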
@@ -0,0 +1,13 @@
from datetime import datetime, timezone


def is_past(dt: datetime) -> bool:
    # we cannot compare a naive now()
    # to dt which might be tz-aware or unaware
    dt = ensure_timezone(dt)
    return dt < datetime.now(timezone.utc)


def ensure_timezone(dt: datetime) -> datetime:
    if dt.tzinfo is None or dt.tzinfo.utcoffset(None) is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt
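These helpers treat naive datetimes as implicit UTC so comparisons never raise. A quick demonstration (in-repo import, runnable inside the nix-shell):

```python
from datetime import datetime, timedelta, timezone

from selfprivacy_api.models.tokens.time import ensure_timezone, is_past

naive = datetime(2020, 1, 1, 0, 0)  # no tzinfo
aware = ensure_timezone(naive)      # same clock reading, now labeled UTC
assert aware.tzinfo == timezone.utc

assert is_past(naive)  # 2020 is behind us; no TypeError despite the naive input
assert not is_past(datetime.now(timezone.utc) + timedelta(hours=1))
```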
@@ -0,0 +1,33 @@
"""
Model of the access token.

Access token has a token string, device name and date of creation.
"""
from datetime import datetime
import secrets
from pydantic import BaseModel


class Token(BaseModel):
    """
    Model of the access token.

    Access token has a token string, device name and date of creation.
    """

    token: str
    device_name: str
    created_at: datetime

    @staticmethod
    def generate(device_name: str) -> "Token":
        """
        Factory to generate a random token.
        """
        creation_date = datetime.now()
        token = secrets.token_urlsafe(32)
        return Token(
            token=token,
            device_name=device_name,
            created_at=creation_date,
        )
@@ -0,0 +1,8 @@
from selfprivacy_api.repositories.tokens.abstract_tokens_repository import (
    AbstractTokensRepository,
)
from selfprivacy_api.repositories.tokens.json_tokens_repository import (
    JsonTokensRepository,
)

repository = JsonTokensRepository()
@@ -0,0 +1,225 @@
from __future__ import annotations

from abc import ABC, abstractmethod
from datetime import datetime
from typing import Optional
from mnemonic import Mnemonic
from secrets import randbelow
import re

from selfprivacy_api.models.tokens.token import Token
from selfprivacy_api.repositories.tokens.exceptions import (
    TokenNotFound,
    InvalidMnemonic,
    RecoveryKeyNotFound,
    NewDeviceKeyNotFound,
)
from selfprivacy_api.models.tokens.recovery_key import RecoveryKey
from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey


class AbstractTokensRepository(ABC):
    def get_token_by_token_string(self, token_string: str) -> Token:
        """Get the token by token"""
        tokens = self.get_tokens()
        for token in tokens:
            if token.token == token_string:
                return token

        raise TokenNotFound("Token not found!")

    def get_token_by_name(self, token_name: str) -> Token:
        """Get the token by name"""
        tokens = self.get_tokens()
        for token in tokens:
            if token.device_name == token_name:
                return token

        raise TokenNotFound("Token not found!")

    @abstractmethod
    def get_tokens(self) -> list[Token]:
        """Get the tokens"""

    def create_token(self, device_name: str) -> Token:
        """Create new token"""
        unique_name = self._make_unique_device_name(device_name)
        new_token = Token.generate(unique_name)

        self._store_token(new_token)

        return new_token

    @abstractmethod
    def delete_token(self, input_token: Token) -> None:
        """Delete the token"""

    def refresh_token(self, input_token: Token) -> Token:
        """Change the token field of the existing token"""
        new_token = Token.generate(device_name=input_token.device_name)
        new_token.created_at = input_token.created_at

        if input_token in self.get_tokens():
            self.delete_token(input_token)
            self._store_token(new_token)
            return new_token

        raise TokenNotFound("Token not found!")

    def is_token_valid(self, token_string: str) -> bool:
        """Check if the token is valid"""
        return token_string in [token.token for token in self.get_tokens()]

    def is_token_name_exists(self, token_name: str) -> bool:
        """Check if the token name exists"""
        return token_name in [token.device_name for token in self.get_tokens()]

    def is_token_name_pair_valid(self, token_name: str, token_string: str) -> bool:
        """Check if the token name and token are valid"""
        try:
            token = self.get_token_by_name(token_name)
            if token is None:
                return False
        except TokenNotFound:
            return False
        return token.token == token_string

    @abstractmethod
    def get_recovery_key(self) -> Optional[RecoveryKey]:
        """Get the recovery key"""

    def create_recovery_key(
        self,
        expiration: Optional[datetime],
        uses_left: Optional[int],
    ) -> RecoveryKey:
        """Create the recovery key"""
        recovery_key = RecoveryKey.generate(expiration, uses_left)
        self._store_recovery_key(recovery_key)
        return recovery_key

    def use_mnemonic_recovery_key(
        self, mnemonic_phrase: str, device_name: str
    ) -> Token:
        """Use the mnemonic recovery key and create a new token with the given name"""
        if not self.is_recovery_key_valid():
            raise RecoveryKeyNotFound("Recovery key not found")

        recovery_key = self.get_recovery_key()

        if recovery_key is None:
            raise RecoveryKeyNotFound("Recovery key not found")

        recovery_hex_key = recovery_key.key
        if not self._assert_mnemonic(recovery_hex_key, mnemonic_phrase):
            raise RecoveryKeyNotFound("Recovery key not found")

        new_token = self.create_token(device_name=device_name)

        self._decrement_recovery_token()

        return new_token

    def is_recovery_key_valid(self) -> bool:
        """Check if the recovery key is valid"""
        recovery_key = self.get_recovery_key()
        if recovery_key is None:
            return False
        return recovery_key.is_valid()

    @abstractmethod
    def _store_recovery_key(self, recovery_key: RecoveryKey) -> None:
        """Store recovery key directly"""

    @abstractmethod
    def _delete_recovery_key(self) -> None:
        """Delete the recovery key"""

    def get_new_device_key(self) -> NewDeviceKey:
        """Creates and returns the new device key"""
        new_device_key = NewDeviceKey.generate()
        self._store_new_device_key(new_device_key)

        return new_device_key

    @abstractmethod
    def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None:
        """Store new device key directly"""

    @abstractmethod
    def delete_new_device_key(self) -> None:
        """Delete the new device key"""

    def use_mnemonic_new_device_key(
        self, mnemonic_phrase: str, device_name: str
    ) -> Token:
        """Use the mnemonic new device key"""
        new_device_key = self._get_stored_new_device_key()
        if not new_device_key:
            raise NewDeviceKeyNotFound

        if not new_device_key.is_valid():
            raise NewDeviceKeyNotFound

        if not self._assert_mnemonic(new_device_key.key, mnemonic_phrase):
            raise NewDeviceKeyNotFound("Phrase is not token!")

        new_token = self.create_token(device_name=device_name)
        self.delete_new_device_key()

        return new_token

    def reset(self):
        for token in self.get_tokens():
            self.delete_token(token)
        self.delete_new_device_key()
        self._delete_recovery_key()

    def clone(self, source: AbstractTokensRepository) -> None:
        """Clone the state of another repository to this one"""
        self.reset()
        for token in source.get_tokens():
            self._store_token(token)

        recovery_key = source.get_recovery_key()
        if recovery_key is not None:
            self._store_recovery_key(recovery_key)

        new_device_key = source._get_stored_new_device_key()
        if new_device_key is not None:
            self._store_new_device_key(new_device_key)

    @abstractmethod
    def _store_token(self, new_token: Token):
        """Store a token directly"""

    @abstractmethod
    def _decrement_recovery_token(self):
        """Decrement recovery key use count by one"""

    @abstractmethod
    def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]:
        """Retrieves new device key that is already stored."""

    def _make_unique_device_name(self, name: str) -> str:
        """Token name must be an alphanumeric string and not empty.
        Replace invalid characters with '_'
        If name exists, add a random number to the end of the name until it is unique.
        """
        if not re.match("^[a-zA-Z0-9]*$", name):
            name = re.sub("[^a-zA-Z0-9]", "_", name)
        if name == "":
            name = "Unknown device"
        while self.is_token_name_exists(name):
            name += str(randbelow(10))
        return name

    # TODO: find a proper place for it
    def _assert_mnemonic(self, hex_key: str, mnemonic_phrase: str):
        """Return true if hex string matches the phrase, false otherwise
        Raise an InvalidMnemonic error if not mnemonic"""
        recovery_token = bytes.fromhex(hex_key)
        if not Mnemonic(language="english").check(mnemonic_phrase):
            raise InvalidMnemonic("Phrase is not mnemonic!")

        phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase)
        return phrase_bytes == recovery_token
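The abstract class is a template: all token, recovery-key and new-device-key logic lives in the base, and a backend only supplies raw storage by implementing the abstract methods. To see the minimal surface a backend must cover, here is a hypothetical dict-backed repository (an illustration only; Json and Redis are the real backends in this diff):

```python
from typing import Optional

from selfprivacy_api.models.tokens.token import Token
from selfprivacy_api.models.tokens.recovery_key import RecoveryKey
from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey
from selfprivacy_api.repositories.tokens.abstract_tokens_repository import (
    AbstractTokensRepository,
)
from selfprivacy_api.repositories.tokens.exceptions import TokenNotFound


class MemoryTokensRepository(AbstractTokensRepository):
    """A hypothetical in-memory backend: raw storage only, no token logic."""

    def __init__(self):
        self._tokens: dict[str, Token] = {}
        self._recovery_key: Optional[RecoveryKey] = None
        self._new_device_key: Optional[NewDeviceKey] = None

    def get_tokens(self) -> list[Token]:
        return list(self._tokens.values())

    def _store_token(self, new_token: Token):
        self._tokens[new_token.token] = new_token

    def delete_token(self, input_token: Token) -> None:
        if input_token.token not in self._tokens:
            raise TokenNotFound("Token not found!")
        del self._tokens[input_token.token]

    def get_recovery_key(self) -> Optional[RecoveryKey]:
        return self._recovery_key

    def _store_recovery_key(self, recovery_key: RecoveryKey) -> None:
        self._recovery_key = recovery_key

    def _delete_recovery_key(self) -> None:
        self._recovery_key = None

    def _decrement_recovery_token(self):
        if self._recovery_key is not None and self._recovery_key.uses_left is not None:
            self._recovery_key.uses_left -= 1

    def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None:
        self._new_device_key = new_device_key

    def delete_new_device_key(self) -> None:
        self._new_device_key = None

    def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]:
        return self._new_device_key
```

With only these storage methods in place, `create_token("laptop")`, mnemonic recovery and `clone()` all work unchanged, because they are inherited from the base class.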
@@ -0,0 +1,14 @@
class TokenNotFound(Exception):
    """Token not found!"""


class RecoveryKeyNotFound(Exception):
    """Recovery key not found!"""


class InvalidMnemonic(Exception):
    """Phrase is not mnemonic!"""


class NewDeviceKeyNotFound(Exception):
    """New device key not found!"""
@ -0,0 +1,153 @@
|
|||
"""
|
||||
temporary legacy
|
||||
"""
|
||||
from typing import Optional
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from selfprivacy_api.utils import UserDataFiles, WriteUserData, ReadUserData
|
||||
from selfprivacy_api.models.tokens.token import Token
|
||||
from selfprivacy_api.models.tokens.recovery_key import RecoveryKey
|
||||
from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey
|
||||
from selfprivacy_api.repositories.tokens.exceptions import (
|
||||
TokenNotFound,
|
||||
)
|
||||
from selfprivacy_api.repositories.tokens.abstract_tokens_repository import (
|
||||
AbstractTokensRepository,
|
||||
)
|
||||
|
||||
|
||||
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
|
||||
|
||||
|
||||
class JsonTokensRepository(AbstractTokensRepository):
|
||||
def get_tokens(self) -> list[Token]:
|
||||
"""Get the tokens"""
|
||||
tokens_list = []
|
||||
|
||||
with ReadUserData(UserDataFiles.TOKENS) as tokens_file:
|
||||
for userdata_token in tokens_file["tokens"]:
|
||||
tokens_list.append(
|
||||
Token(
|
||||
token=userdata_token["token"],
|
||||
device_name=userdata_token["name"],
|
||||
created_at=userdata_token["date"],
|
||||
)
|
||||
)
|
||||
|
||||
return tokens_list
|
||||
|
||||
def _store_token(self, new_token: Token):
|
||||
"""Store a token directly"""
|
||||
with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
|
||||
tokens_file["tokens"].append(
|
||||
{
|
||||
"token": new_token.token,
|
||||
"name": new_token.device_name,
|
||||
"date": new_token.created_at.strftime(DATETIME_FORMAT),
|
||||
}
|
||||
)
|
||||
|
||||
def delete_token(self, input_token: Token) -> None:
|
||||
"""Delete the token"""
|
||||
with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
|
||||
        for userdata_token in tokens_file["tokens"]:
            if userdata_token["token"] == input_token.token:
                tokens_file["tokens"].remove(userdata_token)
                return

        raise TokenNotFound("Token not found!")

    def __key_date_from_str(self, date_string: str) -> Optional[datetime]:
        if date_string is None or date_string == "":
            return None
        # we assume that we store dates in json as naive utc
        utc_no_tz = datetime.fromisoformat(date_string)
        utc_with_tz = utc_no_tz.replace(tzinfo=timezone.utc)
        return utc_with_tz

    def __date_from_tokens_file(
        self, tokens_file: object, tokenfield: str, datefield: str
    ):
        date_string = tokens_file[tokenfield].get(datefield)
        return self.__key_date_from_str(date_string)

    def get_recovery_key(self) -> Optional[RecoveryKey]:
        """Get the recovery key"""
        with ReadUserData(UserDataFiles.TOKENS) as tokens_file:
            if (
                "recovery_token" not in tokens_file
                or tokens_file["recovery_token"] is None
            ):
                return

            recovery_key = RecoveryKey(
                key=tokens_file["recovery_token"].get("token"),
                created_at=self.__date_from_tokens_file(
                    tokens_file, "recovery_token", "date"
                ),
                expires_at=self.__date_from_tokens_file(
                    tokens_file, "recovery_token", "expiration"
                ),
                uses_left=tokens_file["recovery_token"].get("uses_left"),
            )

            return recovery_key

    def _store_recovery_key(self, recovery_key: RecoveryKey) -> None:
        with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
            key_expiration: Optional[str] = None
            if recovery_key.expires_at is not None:
                key_expiration = recovery_key.expires_at.strftime(DATETIME_FORMAT)
            tokens_file["recovery_token"] = {
                "token": recovery_key.key,
                "date": recovery_key.created_at.strftime(DATETIME_FORMAT),
                "expiration": key_expiration,
                "uses_left": recovery_key.uses_left,
            }

    def _decrement_recovery_token(self):
        """Decrement recovery key use count by one"""
        if self.is_recovery_key_valid():
            with WriteUserData(UserDataFiles.TOKENS) as tokens:
                if tokens["recovery_token"]["uses_left"] is not None:
                    tokens["recovery_token"]["uses_left"] -= 1

    def _delete_recovery_key(self) -> None:
        """Delete the recovery key"""
        with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
            if "recovery_token" in tokens_file:
                del tokens_file["recovery_token"]
                return

    def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None:
        with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
            tokens_file["new_device"] = {
                "token": new_device_key.key,
                "date": new_device_key.created_at.strftime(DATETIME_FORMAT),
                "expiration": new_device_key.expires_at.strftime(DATETIME_FORMAT),
            }

    def delete_new_device_key(self) -> None:
        """Delete the new device key"""
        with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
            if "new_device" in tokens_file:
                del tokens_file["new_device"]
                return

    def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]:
        """Retrieves new device key that is already stored."""
        with ReadUserData(UserDataFiles.TOKENS) as tokens_file:
            if "new_device" not in tokens_file or tokens_file["new_device"] is None:
                return

            new_device_key = NewDeviceKey(
                key=tokens_file["new_device"]["token"],
                created_at=self.__date_from_tokens_file(
                    tokens_file, "new_device", "date"
                ),
                expires_at=self.__date_from_tokens_file(
                    tokens_file, "new_device", "expiration"
                ),
            )
            return new_device_key
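A minimal, self-contained sketch of the naive-UTC convention the JSON repository above relies on: dates go into tokens.json without a timezone, and UTC is re-attached on read. The literal date value here is illustrative.

from datetime import datetime, timezone

stored = "2022-01-14T08:31:10.789314"  # naive ISO string, as kept in tokens.json
aware = datetime.fromisoformat(stored).replace(tzinfo=timezone.utc)
assert aware.tzinfo is timezone.utc
assert aware.isoformat() == "2022-01-14T08:31:10.789314+00:00"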
@ -0,0 +1,149 @@
"""
Token repository using Redis as a backend.
"""
from typing import Optional
from datetime import datetime, timezone

from selfprivacy_api.repositories.tokens.abstract_tokens_repository import (
    AbstractTokensRepository,
)
from selfprivacy_api.utils.redis_pool import RedisPool
from selfprivacy_api.models.tokens.token import Token
from selfprivacy_api.models.tokens.recovery_key import RecoveryKey
from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey
from selfprivacy_api.repositories.tokens.exceptions import TokenNotFound

TOKENS_PREFIX = "token_repo:tokens:"
NEW_DEVICE_KEY_REDIS_KEY = "token_repo:new_device_key"
RECOVERY_KEY_REDIS_KEY = "token_repo:recovery_key"


class RedisTokensRepository(AbstractTokensRepository):
    """
    Token repository using Redis as a backend
    """

    def __init__(self):
        self.connection = RedisPool().get_connection()

    @staticmethod
    def token_key_for_device(device_name: str):
        return TOKENS_PREFIX + str(hash(device_name))

    def get_tokens(self) -> list[Token]:
        """Get the tokens"""
        redis = self.connection
        token_keys = redis.keys(TOKENS_PREFIX + "*")
        tokens = []
        for key in token_keys:
            token = self._token_from_hash(key)
            if token is not None:
                # token creation dates are temporarily not tz-aware
                token.created_at = token.created_at.replace(tzinfo=None)
                tokens.append(token)
        return tokens

    def delete_token(self, input_token: Token) -> None:
        """Delete the token"""
        redis = self.connection
        key = RedisTokensRepository._token_redis_key(input_token)
        if input_token not in self.get_tokens():
            raise TokenNotFound
        redis.delete(key)

    def get_recovery_key(self) -> Optional[RecoveryKey]:
        """Get the recovery key"""
        redis = self.connection
        if redis.exists(RECOVERY_KEY_REDIS_KEY):
            return self._recovery_key_from_hash(RECOVERY_KEY_REDIS_KEY)
        return None

    def _store_recovery_key(self, recovery_key: RecoveryKey) -> None:
        self._store_model_as_hash(RECOVERY_KEY_REDIS_KEY, recovery_key)

    def _delete_recovery_key(self) -> None:
        """Delete the recovery key"""
        redis = self.connection
        redis.delete(RECOVERY_KEY_REDIS_KEY)

    def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None:
        """Store new device key directly"""
        self._store_model_as_hash(NEW_DEVICE_KEY_REDIS_KEY, new_device_key)

    def delete_new_device_key(self) -> None:
        """Delete the new device key"""
        redis = self.connection
        redis.delete(NEW_DEVICE_KEY_REDIS_KEY)

    @staticmethod
    def _token_redis_key(token: Token) -> str:
        return RedisTokensRepository.token_key_for_device(token.device_name)

    def _store_token(self, new_token: Token):
        """Store a token directly"""
        key = RedisTokensRepository._token_redis_key(new_token)
        self._store_model_as_hash(key, new_token)

    def _decrement_recovery_token(self):
        """Decrement recovery key use count by one"""
        if self.is_recovery_key_valid():
            recovery_key = self.get_recovery_key()
            if recovery_key is None:
                return
            uses_left = recovery_key.uses_left
            if uses_left is not None:
                redis = self.connection
                redis.hset(RECOVERY_KEY_REDIS_KEY, "uses_left", uses_left - 1)

    def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]:
        """Retrieves new device key that is already stored."""
        return self._new_device_key_from_hash(NEW_DEVICE_KEY_REDIS_KEY)

    @staticmethod
    def _is_date_key(key: str):
        return key in [
            "created_at",
            "expires_at",
        ]

    @staticmethod
    def _prepare_model_dict(d: dict):
        date_keys = [key for key in d.keys() if RedisTokensRepository._is_date_key(key)]
        for date in date_keys:
            if d[date] != "None":
                d[date] = datetime.fromisoformat(d[date])
        for key in d.keys():
            if d[key] == "None":
                d[key] = None

    def _model_dict_from_hash(self, redis_key: str) -> Optional[dict]:
        redis = self.connection
        if redis.exists(redis_key):
            token_dict = redis.hgetall(redis_key)
            RedisTokensRepository._prepare_model_dict(token_dict)
            return token_dict
        return None

    def _hash_as_model(self, redis_key: str, model_class):
        token_dict = self._model_dict_from_hash(redis_key)
        if token_dict is not None:
            return model_class(**token_dict)
        return None

    def _token_from_hash(self, redis_key: str) -> Optional[Token]:
        return self._hash_as_model(redis_key, Token)

    def _recovery_key_from_hash(self, redis_key: str) -> Optional[RecoveryKey]:
        return self._hash_as_model(redis_key, RecoveryKey)

    def _new_device_key_from_hash(self, redis_key: str) -> Optional[NewDeviceKey]:
        return self._hash_as_model(redis_key, NewDeviceKey)

    def _store_model_as_hash(self, redis_key, model):
        redis = self.connection
        for key, value in model.dict().items():
            if isinstance(value, datetime):
                if value.tzinfo is None:
                    value = value.replace(tzinfo=timezone.utc)
                value = value.isoformat()
            redis.hset(redis_key, key, str(value))
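A minimal sketch of the hash round-trip that _store_model_as_hash and _model_dict_from_hash implement above, assuming a reachable local Redis; the Demo model is illustrative and not part of the repository.

import redis
from datetime import datetime, timezone
from pydantic import BaseModel

class Demo(BaseModel):
    name: str
    created_at: datetime

r = redis.Redis(decode_responses=True)  # assumes a Redis server on localhost
demo = Demo(name="example", created_at=datetime.now(timezone.utc))
for key, value in demo.dict().items():
    if isinstance(value, datetime):
        value = value.isoformat()       # datetimes become ISO strings
    r.hset("demo_key", key, str(value))

raw = r.hgetall("demo_key")             # every value comes back as str
raw["created_at"] = datetime.fromisoformat(raw["created_at"])
assert Demo(**raw) == demo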
@ -8,20 +8,18 @@ from selfprivacy_api.actions.api_tokens import (
    InvalidUsesLeft,
    NotFoundException,
    delete_api_token,
    refresh_api_token,
    get_api_recovery_token_status,
    get_api_tokens_with_caller_flag,
    get_new_api_recovery_key,
    refresh_api_token,
    use_mnemonic_recovery_token,
    delete_new_device_auth_token,
    get_new_device_auth_token,
    use_new_device_auth_token,
)

from selfprivacy_api.dependencies import TokenHeader, get_token_header

from selfprivacy_api.utils.auth import (
    delete_new_device_auth_token,
    get_new_device_auth_token,
    use_mnemonic_recoverery_token,
    use_new_device_auth_token,
)

router = APIRouter(
    prefix="/auth",

@ -99,7 +97,7 @@ class UseTokenInput(BaseModel):

@router.post("/recovery_token/use")
async def rest_use_recovery_token(input: UseTokenInput):
    token = use_mnemonic_recoverery_token(input.token, input.device)
    token = use_mnemonic_recovery_token(input.token, input.device)
    if token is None:
        raise HTTPException(status_code=404, detail="Token not found")
    return {"token": token}
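A hedged usage sketch for the endpoint above with FastAPI's test client; the phrase and device name are placeholders, and with no recovery key configured the handler falls through to 404.

from fastapi.testclient import TestClient
from selfprivacy_api.app import app

client = TestClient(app)
response = client.post(
    "/auth/recovery_token/use",
    json={"token": "<mnemonic word list>", "device": "laptop"},
)
assert response.status_code == 404  # no matching recovery token stored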
@ -257,24 +257,25 @@ async def restore_restic_backup(backup: BackupRestoreInput):
    raise HTTPException(status_code=404, detail="Backup not found")


class BackblazeConfigInput(BaseModel):
class BackupConfigInput(BaseModel):
    accountId: str
    accountKey: str
    bucket: str


@router.put("/restic/backblaze/config")
async def set_backblaze_config(backblaze_config: BackblazeConfigInput):
async def set_backblaze_config(backup_config: BackupConfigInput):
    with WriteUserData() as data:
        if "backblaze" not in data:
            data["backblaze"] = {}
        data["backblaze"]["accountId"] = backblaze_config.accountId
        data["backblaze"]["accountKey"] = backblaze_config.accountKey
        data["backblaze"]["bucket"] = backblaze_config.bucket
        if "backup" not in data:
            data["backup"] = {}
        data["backup"]["provider"] = "BACKBLAZE"
        data["backup"]["accountId"] = backup_config.accountId
        data["backup"]["accountKey"] = backup_config.accountKey
        data["backup"]["bucket"] = backup_config.bucket

    restic_tasks.update_keys_from_userdata()

    return "New Backblaze settings saved"
    return "New backup settings saved"


@router.post("/ssh/enable")
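For reference, the shape of the userdata this handler writes before and after the change, shown as plain dicts with placeholder values; the provider tag is the only new field.

old = {"backblaze": {"accountId": "ID", "accountKey": "KEY", "bucket": "selfprivacy"}}
new = {
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy",
    }
}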
@ -7,6 +7,7 @@ from threading import Lock
from enum import Enum
import portalocker
from selfprivacy_api.utils import ReadUserData
from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass


class ResticStates(Enum):

@ -21,7 +22,7 @@ class ResticStates(Enum):
    INITIALIZING = 6


class ResticController:
class ResticController(metaclass=SingletonMetaclass):
    """
    States in which the restic_controller may be
    - no backblaze key

@ -35,16 +36,8 @@ class ResticController:
    Current state can be fetched with get_state()
    """

    _instance = None
    _lock = Lock()
    _initialized = False

    def __new__(cls):
        if not cls._instance:
            with cls._lock:
                cls._instance = super(ResticController, cls).__new__(cls)
        return cls._instance

    def __init__(self):
        if self._initialized:
            return
@ -144,7 +144,7 @@ class Bitwarden(Service):
    ]

    def move_to_volume(self, volume: BlockDevice) -> Job:
        job = Jobs.get_instance().add(
        job = Jobs.add(
            type_id="services.bitwarden.move",
            name="Move Bitwarden",
            description=f"Moving Bitwarden data to {volume.name}",

@ -29,7 +29,7 @@ def move_service(
    userdata_location: str,
):
    """Move a service to another volume."""
    job = Jobs.get_instance().update(
    job = Jobs.update(
        job=job,
        status_text="Performing pre-move checks...",
        status=JobStatus.RUNNING,

@ -37,7 +37,7 @@ def move_service(
    service_name = service.get_display_name()
    with ReadUserData() as user_data:
        if not user_data.get("useBinds", False):
            Jobs.get_instance().update(
            Jobs.update(
                job=job,
                status=JobStatus.ERROR,
                error="Server is not using binds.",

@ -46,7 +46,7 @@ def move_service(
    # Check if we are on the same volume
    old_volume = service.get_location()
    if old_volume == volume.name:
        Jobs.get_instance().update(
        Jobs.update(
            job=job,
            status=JobStatus.ERROR,
            error=f"{service_name} is already on this volume.",

@ -54,7 +54,7 @@ def move_service(
        return
    # Check if there is enough space on the new volume
    if int(volume.fsavail) < service.get_storage_usage():
        Jobs.get_instance().update(
        Jobs.update(
            job=job,
            status=JobStatus.ERROR,
            error="Not enough space on the new volume.",

@ -62,7 +62,7 @@ def move_service(
        return
    # Make sure the volume is mounted
    if volume.name != "sda1" and f"/volumes/{volume.name}" not in volume.mountpoints:
        Jobs.get_instance().update(
        Jobs.update(
            job=job,
            status=JobStatus.ERROR,
            error="Volume is not mounted.",

@ -71,14 +71,14 @@ def move_service(
    # Make sure the current actual directory exists and its user and group are correct
    for folder in folder_names:
        if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").exists():
            Jobs.get_instance().update(
            Jobs.update(
                job=job,
                status=JobStatus.ERROR,
                error=f"{service_name} is not found.",
            )
            return
        if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").is_dir():
            Jobs.get_instance().update(
            Jobs.update(
                job=job,
                status=JobStatus.ERROR,
                error=f"{service_name} is not a directory.",

@ -88,7 +88,7 @@ def move_service(
            not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").owner()
            == folder.owner
        ):
            Jobs.get_instance().update(
            Jobs.update(
                job=job,
                status=JobStatus.ERROR,
                error=f"{service_name} owner is not {folder.owner}.",

@ -96,7 +96,7 @@ def move_service(
            return

    # Stop service
    Jobs.get_instance().update(
    Jobs.update(
        job=job,
        status=JobStatus.RUNNING,
        status_text=f"Stopping {service_name}...",

@ -113,7 +113,7 @@ def move_service(
            break
        time.sleep(1)
    else:
        Jobs.get_instance().update(
        Jobs.update(
            job=job,
            status=JobStatus.ERROR,
            error=f"{service_name} did not stop in 30 seconds.",

@ -121,7 +121,7 @@ def move_service(
        return

    # Unmount old volume
    Jobs.get_instance().update(
    Jobs.update(
        job=job,
        status_text="Unmounting old folder...",
        status=JobStatus.RUNNING,

@ -134,14 +134,14 @@ def move_service(
            check=True,
        )
    except subprocess.CalledProcessError:
        Jobs.get_instance().update(
        Jobs.update(
            job=job,
            status=JobStatus.ERROR,
            error="Unable to unmount old volume.",
        )
        return
    # Move data to new volume and set correct permissions
    Jobs.get_instance().update(
    Jobs.update(
        job=job,
        status_text="Moving data to new volume...",
        status=JobStatus.RUNNING,

@ -154,14 +154,14 @@ def move_service(
            f"/volumes/{old_volume}/{folder.name}",
            f"/volumes/{volume.name}/{folder.name}",
        )
        Jobs.get_instance().update(
        Jobs.update(
            job=job,
            status_text="Moving data to new volume...",
            status=JobStatus.RUNNING,
            progress=current_progress + folder_percentage,
        )

    Jobs.get_instance().update(
    Jobs.update(
        job=job,
        status_text=f"Making sure {service_name} owns its files...",
        status=JobStatus.RUNNING,

@ -180,14 +180,14 @@ def move_service(
        )
    except subprocess.CalledProcessError as error:
        print(error.output)
        Jobs.get_instance().update(
        Jobs.update(
            job=job,
            status=JobStatus.RUNNING,
            error=f"Unable to set ownership of new volume. {service_name} may not be able to access its files. Continuing anyway.",
        )

    # Mount new volume
    Jobs.get_instance().update(
    Jobs.update(
        job=job,
        status_text=f"Mounting {service_name} data...",
        status=JobStatus.RUNNING,

@ -207,7 +207,7 @@ def move_service(
        )
    except subprocess.CalledProcessError as error:
        print(error.output)
        Jobs.get_instance().update(
        Jobs.update(
            job=job,
            status=JobStatus.ERROR,
            error="Unable to mount new volume.",

@ -215,7 +215,7 @@ def move_service(
        return

    # Update userdata
    Jobs.get_instance().update(
    Jobs.update(
        job=job,
        status_text="Finishing move...",
        status=JobStatus.RUNNING,

@ -227,7 +227,7 @@ def move_service(
        user_data[userdata_location]["location"] = volume.name
    # Start service
    service.start()
    Jobs.get_instance().update(
    Jobs.update(
        job=job,
        status=JobStatus.FINISHED,
        result=f"{service_name} moved successfully.",
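A short sketch of the Jobs API change that runs through all of the hunks above: Jobs is now a singleton, so add and update are called on the class directly instead of via get_instance(). The import path and type_id below are assumptions for illustration; the keyword arguments mirror the calls in the diff.

from selfprivacy_api.jobs import Jobs, JobStatus  # assumed import path

job = Jobs.add(
    type_id="services.example.move",   # illustrative type_id
    name="Move Example",
    description="Moving example data",
)
Jobs.update(job=job, status=JobStatus.RUNNING, status_text="Working...")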
@ -141,7 +141,7 @@ class Gitea(Service):
    ]

    def move_to_volume(self, volume: BlockDevice) -> Job:
        job = Jobs.get_instance().add(
        job = Jobs.add(
            type_id="services.gitea.move",
            name="Move Gitea",
            description=f"Moving Gitea data to {volume.name}",

@ -149,7 +149,7 @@ class MailServer(Service):
    ]

    def move_to_volume(self, volume: BlockDevice) -> Job:
        job = Jobs.get_instance().add(
        job = Jobs.add(
            type_id="services.mailserver.move",
            name="Move Mail Server",
            description=f"Moving mailserver data to {volume.name}",

@ -149,7 +149,7 @@ class Nextcloud(Service):
    ]

    def move_to_volume(self, volume: BlockDevice) -> Job:
        job = Jobs.get_instance().add(
        job = Jobs.add(
            type_id="services.nextcloud.move",
            name="Move Nextcloud",
            description=f"Moving Nextcloud to volume {volume.name}",

@ -129,7 +129,7 @@ class Pleroma(Service):
    ]

    def move_to_volume(self, volume: BlockDevice) -> Job:
        job = Jobs.get_instance().add(
        job = Jobs.add(
            type_id="services.pleroma.move",
            name="Move Pleroma",
            description=f"Moving Pleroma to volume {volume.name}",
@ -1,329 +0,0 @@
#!/usr/bin/env python3
"""Token management utils"""
import secrets
from datetime import datetime, timedelta
import re
import typing

from pydantic import BaseModel
from mnemonic import Mnemonic

from . import ReadUserData, UserDataFiles, WriteUserData, parse_date

"""
Tokens are stored in the tokens.json file.
The file contains device tokens, the recovery token and the new device auth token.
File structure:
{
    "tokens": [
        {
            "token": "device token",
            "name": "device name",
            "date": "date of creation",
        }
    ],
    "recovery_token": {
        "token": "recovery token",
        "date": "date of creation",
        "expiration": "date of expiration",
        "uses_left": "number of uses left"
    },
    "new_device": {
        "token": "new device auth token",
        "date": "date of creation",
        "expiration": "date of expiration",
    }
}
The recovery token may or may not have an expiration date and uses_left.
There may be no recovery token at all.
Device tokens must be unique.
"""


def _get_tokens():
    """Get all tokens as a list of the token of every device"""
    with ReadUserData(UserDataFiles.TOKENS) as tokens:
        return [token["token"] for token in tokens["tokens"]]


def _get_token_names():
    """Get all token names"""
    with ReadUserData(UserDataFiles.TOKENS) as tokens:
        return [t["name"] for t in tokens["tokens"]]


def _validate_token_name(name):
    """A token name must be a non-empty alphanumeric string.
    Replace invalid characters with '_'.
    If the token name already exists, append a random digit to it until it is unique.
    """
    if not re.match("^[a-zA-Z0-9]*$", name):
        name = re.sub("[^a-zA-Z0-9]", "_", name)
    if name == "":
        name = "Unknown device"
    while name in _get_token_names():
        name += str(secrets.randbelow(10))
    return name


def is_token_valid(token):
    """Check if the token is valid"""
    if token in _get_tokens():
        return True
    return False


def is_token_name_exists(token_name):
    """Check if the token name exists"""
    with ReadUserData(UserDataFiles.TOKENS) as tokens:
        return token_name in [t["name"] for t in tokens["tokens"]]


def is_token_name_pair_valid(token_name, token):
    """Check if the token name and token pair exists"""
    with ReadUserData(UserDataFiles.TOKENS) as tokens:
        for t in tokens["tokens"]:
            if t["name"] == token_name and t["token"] == token:
                return True
        return False


def get_token_name(token: str) -> typing.Optional[str]:
    """Return the name of the token provided"""
    with ReadUserData(UserDataFiles.TOKENS) as tokens:
        for t in tokens["tokens"]:
            if t["token"] == token:
                return t["name"]
        return None


class BasicTokenInfo(BaseModel):
    """Token info"""

    name: str
    date: datetime


def get_tokens_info():
    """Get info for all tokens, without the tokens themselves"""
    with ReadUserData(UserDataFiles.TOKENS) as tokens:
        return [
            BasicTokenInfo(
                name=t["name"],
                date=parse_date(t["date"]),
            )
            for t in tokens["tokens"]
        ]


def _generate_token():
    """Generate a new token and make sure it is unique"""
    token = secrets.token_urlsafe(32)
    while token in _get_tokens():
        token = secrets.token_urlsafe(32)
    return token


def create_token(name):
    """Create a new token"""
    token = _generate_token()
    name = _validate_token_name(name)
    with WriteUserData(UserDataFiles.TOKENS) as tokens:
        tokens["tokens"].append(
            {
                "token": token,
                "name": name,
                "date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")),
            }
        )
    return token


def delete_token(token_name):
    """Delete a token"""
    with WriteUserData(UserDataFiles.TOKENS) as tokens:
        tokens["tokens"] = [t for t in tokens["tokens"] if t["name"] != token_name]


def refresh_token(token: str) -> typing.Optional[str]:
    """Change the token field of an existing token"""
    new_token = _generate_token()
    with WriteUserData(UserDataFiles.TOKENS) as tokens:
        for t in tokens["tokens"]:
            if t["token"] == token:
                t["token"] = new_token
                return new_token
        return None


def is_recovery_token_exists():
    """Check if a recovery token exists"""
    with ReadUserData(UserDataFiles.TOKENS) as tokens:
        return "recovery_token" in tokens


def is_recovery_token_valid():
    """Check if the recovery token is valid"""
    with ReadUserData(UserDataFiles.TOKENS) as tokens:
        if "recovery_token" not in tokens:
            return False
        recovery_token = tokens["recovery_token"]
        if "uses_left" in recovery_token and recovery_token["uses_left"] is not None:
            if recovery_token["uses_left"] <= 0:
                return False
        if "expiration" not in recovery_token or recovery_token["expiration"] is None:
            return True
        return datetime.now() < parse_date(recovery_token["expiration"])


def get_recovery_token_status():
    """Get the recovery token's date of creation, expiration and uses left"""
    with ReadUserData(UserDataFiles.TOKENS) as tokens:
        if "recovery_token" not in tokens:
            return None
        recovery_token = tokens["recovery_token"]
        return {
            "date": recovery_token["date"],
            "expiration": recovery_token["expiration"]
            if "expiration" in recovery_token
            else None,
            "uses_left": recovery_token["uses_left"]
            if "uses_left" in recovery_token
            else None,
        }


def _get_recovery_token():
    """Get the recovery token"""
    with ReadUserData(UserDataFiles.TOKENS) as tokens:
        if "recovery_token" not in tokens:
            return None
        return tokens["recovery_token"]["token"]


def generate_recovery_token(
    expiration: typing.Optional[datetime], uses_left: typing.Optional[int]
) -> str:
    """Generate a 24-byte recovery token and return a mnemonic word list.
    Write a string representation of the recovery token to the tokens.json file.
    """
    # expiration must be a datetime or None
    # uses_left must be an integer or None
    if expiration is not None:
        if not isinstance(expiration, datetime):
            raise TypeError("expiration must be a datetime object")
    if uses_left is not None:
        if not isinstance(uses_left, int):
            raise TypeError("uses_left must be an integer")
        if uses_left <= 0:
            raise ValueError("uses_left must be greater than 0")

    recovery_token = secrets.token_bytes(24)
    recovery_token_str = recovery_token.hex()
    with WriteUserData(UserDataFiles.TOKENS) as tokens:
        tokens["recovery_token"] = {
            "token": recovery_token_str,
            "date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")),
            "expiration": expiration.strftime("%Y-%m-%dT%H:%M:%S.%f")
            if expiration is not None
            else None,
            "uses_left": uses_left if uses_left is not None else None,
        }
    return Mnemonic(language="english").to_mnemonic(recovery_token)


def use_mnemonic_recoverery_token(mnemonic_phrase, name):
    """Use the recovery token by converting the mnemonic word list to a byte array.
    If the recovery token is itself invalid, return None.
    If the binary representation of the phrase does not match
    the byte array of the recovery token, return None.
    If the mnemonic phrase is valid, generate a device token and return it.
    Subtract 1 from uses_left if it exists.
    mnemonic_phrase is a string representation of the mnemonic word list.
    """
    if not is_recovery_token_valid():
        return None
    recovery_token_str = _get_recovery_token()
    if recovery_token_str is None:
        return None
    recovery_token = bytes.fromhex(recovery_token_str)
    if not Mnemonic(language="english").check(mnemonic_phrase):
        return None
    phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase)
    if phrase_bytes != recovery_token:
        return None
    token = _generate_token()
    name = _validate_token_name(name)
    with WriteUserData(UserDataFiles.TOKENS) as tokens:
        tokens["tokens"].append(
            {
                "token": token,
                "name": name,
                "date": str(datetime.now()),
            }
        )
        if "recovery_token" in tokens:
            if (
                "uses_left" in tokens["recovery_token"]
                and tokens["recovery_token"]["uses_left"] is not None
            ):
                tokens["recovery_token"]["uses_left"] -= 1
    return token


def get_new_device_auth_token() -> str:
    """Generate a new device auth token which is valid for 10 minutes
    and return a mnemonic phrase representation.
    Write the token to the new_device field of the tokens.json file.
    """
    token = secrets.token_bytes(16)
    token_str = token.hex()
    with WriteUserData(UserDataFiles.TOKENS) as tokens:
        tokens["new_device"] = {
            "token": token_str,
            "date": str(datetime.now()),
            "expiration": str(datetime.now() + timedelta(minutes=10)),
        }
    return Mnemonic(language="english").to_mnemonic(token)


def _get_new_device_auth_token():
    """Get the new device auth token. If it is expired, return None"""
    with ReadUserData(UserDataFiles.TOKENS) as tokens:
        if "new_device" not in tokens:
            return None
        new_device = tokens["new_device"]
        if "expiration" not in new_device:
            return None
        expiration = parse_date(new_device["expiration"])
        if datetime.now() > expiration:
            return None
        return new_device["token"]


def delete_new_device_auth_token():
    """Delete the new device auth token"""
    with WriteUserData(UserDataFiles.TOKENS) as tokens:
        if "new_device" in tokens:
            del tokens["new_device"]


def use_new_device_auth_token(mnemonic_phrase, name):
    """Use the new device auth token by converting the mnemonic string to a byte array.
    If the mnemonic phrase is valid, generate a device token and return it.
    The new device auth token is deleted afterwards.
    """
    token_str = _get_new_device_auth_token()
    if token_str is None:
        return None
    token = bytes.fromhex(token_str)
    if not Mnemonic(language="english").check(mnemonic_phrase):
        return None
    phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase)
    if phrase_bytes != token:
        return None
    token = create_token(name)
    with WriteUserData(UserDataFiles.TOKENS) as tokens:
        if "new_device" in tokens:
            del tokens["new_device"]
    return token
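The mnemonic round-trip that the deleted module above (and its repository replacements) rely on, as a self-contained sketch: the server keeps the token as hex, the user receives a word list, and on use the words are folded back into bytes and compared.

import secrets
from mnemonic import Mnemonic

mnemo = Mnemonic(language="english")
token = secrets.token_bytes(24)    # 24 bytes -> an 18-word recovery phrase
stored_hex = token.hex()           # what goes into tokens.json
phrase = mnemo.to_mnemonic(token)  # what the user writes down

assert mnemo.check(phrase)         # valid BIP-39 English words
assert mnemo.to_entropy(phrase) == bytes.fromhex(stored_hex)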
@ -4,6 +4,7 @@ import json
import typing

from selfprivacy_api.utils import WriteUserData
from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass


def get_block_device(device_name):

@ -147,16 +148,9 @@ class BlockDevice:
        return False


class BlockDevices:
class BlockDevices(metaclass=SingletonMetaclass):
    """Singleton holding all Block devices"""

    _instance = None

    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        self.block_devices = []
        self.update()
@ -0,0 +1,41 @@
"""
Redis pool module for selfprivacy_api
"""
import redis
from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass
from os import environ

REDIS_SOCKET = "/run/redis-sp-api/redis.sock"


class RedisPool(metaclass=SingletonMetaclass):
    """
    Redis connection pool singleton.
    """

    def __init__(self):
        if "USE_REDIS_PORT" in environ.keys():
            self._pool = redis.ConnectionPool(
                host="127.0.0.1",
                port=int(environ["USE_REDIS_PORT"]),
                decode_responses=True,
            )
        else:
            self._pool = redis.ConnectionPool.from_url(
                f"unix://{REDIS_SOCKET}",
                decode_responses=True,
            )
        self._pubsub_connection = self.get_connection()

    def get_connection(self):
        """
        Get a connection from the pool.
        """
        return redis.Redis(connection_pool=self._pool)

    def get_pubsub(self):
        """
        Get a pubsub connection from the pool.
        """
        return self._pubsub_connection.pubsub()
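A usage sketch for the pool above, assuming a Redis server is reachable the way the module expects (USE_REDIS_PORT set, or the unix socket present):

from selfprivacy_api.utils.redis_pool import RedisPool

r = RedisPool().get_connection()     # every caller shares one pool
r.set("greeting", "hello")
assert r.get("greeting") == "hello"  # str, because decode_responses=True
assert RedisPool() is RedisPool()    # the metaclass guarantees one instance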
@ -0,0 +1,23 @@
"""
Singleton is a creational design pattern, which ensures that only
one object of its kind exists and provides a single point of access
to it for any other code.
"""
from threading import Lock


class SingletonMetaclass(type):
    """
    This is a thread-safe implementation of Singleton.
    """

    _instances = {}
    _lock: Lock = Lock()

    def __call__(cls, *args, **kwargs):
        with cls._lock:
            if cls not in cls._instances:
                cls._instances[cls] = super(SingletonMetaclass, cls).__call__(
                    *args, **kwargs
                )
        return cls._instances[cls]
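How the metaclass above behaves in practice, with an illustrative Config class that is not part of the repository: the second call returns the first instance, and __init__ does not run again.

class Config(metaclass=SingletonMetaclass):
    def __init__(self):
        self.values = {}

a = Config()
b = Config()
a.values["domain"] = "example.org"
assert a is b
assert b.values["domain"] == "example.org"  # one shared object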
setup.py
|
@ -2,7 +2,7 @@ from setuptools import setup, find_packages
|
|||
|
||||
setup(
|
||||
name="selfprivacy_api",
|
||||
version="2.0.0",
|
||||
version="2.1.0",
|
||||
packages=find_packages(),
|
||||
scripts=[
|
||||
"selfprivacy_api/app.py",
|
||||
|
|
|
@ -18,6 +18,7 @@ let
      black
      fastapi
      uvicorn
      redis
      (buildPythonPackage rec {
        pname = "strawberry-graphql";
        version = "0.123.0";

@ -63,9 +64,15 @@ pkgs.mkShell {
  buildInputs = [
    sp-python
    pkgs.black
    pkgs.redis
  ];
  shellHook = ''
    PYTHONPATH=${sp-python}/${sp-python.sitePackages}
    # envs set with export and as attributes are treated differently.
    # for example, printenv <Name> will not fetch the value of an attribute.
    export USE_REDIS_PORT=6379
    pkill redis-server
    redis-server --bind 127.0.0.1 --port $USE_REDIS_PORT >/dev/null &
    # maybe set more env-vars
  '';
}
@ -1,6 +1,22 @@
import json
from datetime import datetime, timezone, timedelta
from mnemonic import Mnemonic

# for expiration tests. If this causes headaches, consider freezegun
RECOVERY_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.time.datetime"
DEVICE_KEY_VALIDATION_DATETIME = RECOVERY_KEY_VALIDATION_DATETIME

FIVE_MINUTES_INTO_FUTURE_NAIVE = datetime.now() + timedelta(minutes=5)
FIVE_MINUTES_INTO_FUTURE = datetime.now(timezone.utc) + timedelta(minutes=5)
FIVE_MINUTES_INTO_PAST_NAIVE = datetime.now() - timedelta(minutes=5)
FIVE_MINUTES_INTO_PAST = datetime.now(timezone.utc) - timedelta(minutes=5)


class NearFuture(datetime):
    @classmethod
    def now(cls, tz=None):
        return datetime.now(tz) + timedelta(minutes=13)


def read_json(file_path):
    with open(file_path, "r", encoding="utf-8") as file:

@ -26,3 +42,10 @@ def generate_users_query(query_array):

def mnemonic_to_hex(mnemonic):
    return Mnemonic(language="english").to_entropy(mnemonic).hex()


def assert_recovery_recent(time_generated):
    assert (
        datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - timedelta(seconds=5)
        < datetime.now()
    )
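A quick illustration of the NearFuture trick above: code under test that calls datetime.now() can be pointed at NearFuture (for example via mocker.patch on the *_VALIDATION_DATETIME paths), so every "now" it sees is 13 minutes ahead, enough to expire a 10-minute device key.

from datetime import datetime, timedelta  # NearFuture as defined above

real_now = datetime.now()
shifted_now = NearFuture.now()
assert timedelta(minutes=12) < shifted_now - real_now < timedelta(minutes=14)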
@ -4,19 +4,88 @@
import os
import pytest
from fastapi.testclient import TestClient
import os.path as path
import datetime

from selfprivacy_api.models.tokens.token import Token
from selfprivacy_api.repositories.tokens.json_tokens_repository import (
    JsonTokensRepository,
)
from selfprivacy_api.repositories.tokens.redis_tokens_repository import (
    RedisTokensRepository,
)

from tests.common import read_json

EMPTY_TOKENS_JSON = ' {"tokens": []}'


TOKENS_FILE_CONTENTS = {
    "tokens": [
        {
            "token": "TEST_TOKEN",
            "name": "test_token",
            "date": datetime.datetime(2022, 1, 14, 8, 31, 10, 789314),
        },
        {
            "token": "TEST_TOKEN2",
            "name": "test_token2",
            "date": datetime.datetime(2022, 1, 14, 8, 31, 10, 789314),
        },
    ]
}

DEVICE_WE_AUTH_TESTS_WITH = TOKENS_FILE_CONTENTS["tokens"][0]


def pytest_generate_tests(metafunc):
    os.environ["TEST_MODE"] = "true"


def global_data_dir():
    return path.join(path.dirname(__file__), "data")


@pytest.fixture
def tokens_file(mocker, shared_datadir):
    """Mock tokens file."""
    mock = mocker.patch(
        "selfprivacy_api.utils.TOKENS_FILE", shared_datadir / "tokens.json"
    )
    return mock
def empty_tokens(mocker, tmpdir):
    tokenfile = tmpdir / "empty_tokens.json"
    with open(tokenfile, "w") as file:
        file.write(EMPTY_TOKENS_JSON)
    mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokenfile)
    assert read_json(tokenfile)["tokens"] == []
    return tmpdir


@pytest.fixture
def empty_json_repo(empty_tokens):
    repo = JsonTokensRepository()
    for token in repo.get_tokens():
        repo.delete_token(token)
    assert repo.get_tokens() == []
    return repo


@pytest.fixture
def empty_redis_repo():
    repo = RedisTokensRepository()
    repo.reset()
    assert repo.get_tokens() == []
    return repo


@pytest.fixture
def tokens_file(empty_redis_repo, tmpdir):
    """A state with tokens"""
    repo = empty_redis_repo
    for token in TOKENS_FILE_CONTENTS["tokens"]:
        repo._store_token(
            Token(
                token=token["token"],
                device_name=token["name"],
                created_at=token["date"],
            )
        )
    return repo


@pytest.fixture

@ -48,7 +117,9 @@ def authorized_client(tokens_file, huey_database, jobs_file):
    from selfprivacy_api.app import app

    client = TestClient(app)
    client.headers.update({"Authorization": "Bearer TEST_TOKEN"})
    client.headers.update(
        {"Authorization": "Bearer " + DEVICE_WE_AUTH_TESTS_WITH["token"]}
    )
    return client
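A sketch of how the fixtures above compose, as a hypothetical test body: tokens_file now seeds the Redis-backed repository, so a test receives a repo that already holds the two well-known devices.

def test_fixture_seeds_two_devices(tokens_file):
    names = [t.device_name for t in tokens_file.get_tokens()]
    assert sorted(names) == ["test_token", "test_token2"]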
@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false

@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": true
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",

@ -49,6 +41,19 @@
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "volumes": [
    ]
}
@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false

@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": true
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",

@ -55,5 +47,18 @@
            "mountPoint": "/volumes/sda1",
            "filesystem": "ext4"
        }
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}
@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false

@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": true
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",

@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}
@ -0,0 +1,88 @@
from tests.common import generate_api_query
from tests.conftest import TOKENS_FILE_CONTENTS, DEVICE_WE_AUTH_TESTS_WITH

ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"]


def assert_ok(response, request):
    data = assert_data(response)
    assert data[request]["success"] is True
    assert data[request]["message"] is not None
    assert data[request]["code"] == 200


def assert_errorcode(response, request, code):
    data = assert_data(response)
    assert data[request]["success"] is False
    assert data[request]["message"] is not None
    assert data[request]["code"] == code


def assert_empty(response):
    assert response.status_code == 200
    assert response.json().get("data") is None


def assert_data(response):
    assert response.status_code == 200
    data = response.json().get("data")
    assert data is not None
    return data


API_DEVICES_QUERY = """
devices {
    creationDate
    isCaller
    name
}
"""


def request_devices(client):
    return client.post(
        "/graphql",
        json={"query": generate_api_query([API_DEVICES_QUERY])},
    )


def graphql_get_devices(client):
    response = request_devices(client)
    data = assert_data(response)
    devices = data["api"]["devices"]
    assert devices is not None
    return devices


def set_client_token(client, token):
    client.headers.update({"Authorization": "Bearer " + token})


def assert_token_valid(client, token):
    set_client_token(client, token)
    assert graphql_get_devices(client) is not None


def assert_same(graphql_devices, abstract_devices):
    """Orderless comparison"""
    assert len(graphql_devices) == len(abstract_devices)
    for original_device in abstract_devices:
        assert original_device["name"] in [device["name"] for device in graphql_devices]
        for device in graphql_devices:
            if device["name"] == original_device["name"]:
                assert device["creationDate"] == original_device["date"].isoformat()


def assert_original(client):
    devices = graphql_get_devices(client)
    assert_original_devices(devices)


def assert_original_devices(devices):
    assert_same(devices, ORIGINAL_DEVICES)

    for device in devices:
        if device["name"] == DEVICE_WE_AUTH_TESTS_WITH["name"]:
            assert device["isCaller"] is True
        else:
            assert device["isCaller"] is False
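A hypothetical test showing how the helpers above are meant to chain (not part of the diff):

def test_devices_look_untouched(authorized_client):
    devices = graphql_get_devices(authorized_client)  # asserts 200 and data
    assert_original_devices(devices)  # same two devices, caller flagged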
@ -1,14 +0,0 @@
{
    "tokens": [
        {
            "token": "TEST_TOKEN",
            "name": "test_token",
            "date": "2022-01-14 08:31:10.789314"
        },
        {
            "token": "TEST_TOKEN2",
            "name": "test_token2",
            "date": "2022-01-14 08:31:10.789314"
        }
    ]
}
@ -3,25 +3,11 @@
# pylint: disable=missing-function-docstring

from tests.common import generate_api_query
from tests.test_graphql.common import assert_original_devices
from tests.test_graphql.test_api_devices import API_DEVICES_QUERY
from tests.test_graphql.test_api_recovery import API_RECOVERY_QUERY
from tests.test_graphql.test_api_version import API_VERSION_QUERY

TOKENS_FILE_CONTETS = {
    "tokens": [
        {
            "token": "TEST_TOKEN",
            "name": "test_token",
            "date": "2022-01-14 08:31:10.789314",
        },
        {
            "token": "TEST_TOKEN2",
            "name": "test_token2",
            "date": "2022-01-14 08:31:10.789314",
        },
    ]
}


def test_graphql_get_entire_api_data(authorized_client, tokens_file):
    response = authorized_client.post(

@ -35,20 +21,11 @@ def test_graphql_get_entire_api_data(authorized_client, tokens_file):
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert "version" in response.json()["data"]["api"]
    assert response.json()["data"]["api"]["devices"] is not None
    assert len(response.json()["data"]["api"]["devices"]) == 2
    assert (
        response.json()["data"]["api"]["devices"][0]["creationDate"]
        == "2022-01-14T08:31:10.789314"
    )
    assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True
    assert response.json()["data"]["api"]["devices"][0]["name"] == "test_token"
    assert (
        response.json()["data"]["api"]["devices"][1]["creationDate"]
        == "2022-01-14T08:31:10.789314"
    )
    assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False
    assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2"

    devices = response.json()["data"]["api"]["devices"]
    assert devices is not None
    assert_original_devices(devices)

    assert response.json()["data"]["api"]["recoveryKey"] is not None
    assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False
    assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False
@ -1,65 +1,77 @@
|
|||
# pylint: disable=redefined-outer-name
|
||||
# pylint: disable=unused-argument
|
||||
# pylint: disable=missing-function-docstring
|
||||
import datetime
|
||||
from mnemonic import Mnemonic
|
||||
from tests.common import (
|
||||
RECOVERY_KEY_VALIDATION_DATETIME,
|
||||
DEVICE_KEY_VALIDATION_DATETIME,
|
||||
NearFuture,
|
||||
generate_api_query,
|
||||
)
|
||||
from tests.conftest import DEVICE_WE_AUTH_TESTS_WITH, TOKENS_FILE_CONTENTS
|
||||
from tests.test_graphql.common import (
|
||||
assert_data,
|
||||
assert_empty,
|
||||
assert_ok,
|
||||
assert_errorcode,
|
||||
assert_token_valid,
|
||||
assert_original,
|
||||
assert_same,
|
||||
graphql_get_devices,
|
||||
request_devices,
|
||||
set_client_token,
|
||||
API_DEVICES_QUERY,
|
||||
ORIGINAL_DEVICES,
|
||||
)
|
||||
|
||||
from tests.common import generate_api_query, read_json, write_json
|
||||
|
||||
TOKENS_FILE_CONTETS = {
|
||||
"tokens": [
|
||||
{
|
||||
"token": "TEST_TOKEN",
|
||||
"name": "test_token",
|
||||
"date": "2022-01-14 08:31:10.789314",
|
||||
def graphql_get_caller_token_info(client):
|
||||
devices = graphql_get_devices(client)
|
||||
for device in devices:
|
||||
if device["isCaller"] is True:
|
||||
return device
|
||||
|
||||
|
||||
def graphql_get_new_device_key(authorized_client) -> str:
|
||||
response = authorized_client.post(
|
||||
"/graphql",
|
||||
json={"query": NEW_DEVICE_KEY_MUTATION},
|
||||
)
|
||||
assert_ok(response, "getNewDeviceApiKey")
|
||||
|
||||
key = response.json()["data"]["getNewDeviceApiKey"]["key"]
|
||||
assert key.split(" ").__len__() == 12
|
||||
return key
|
||||
|
||||
|
||||
def graphql_try_auth_new_device(client, mnemonic_key, device_name):
|
||||
return client.post(
|
||||
"/graphql",
|
||||
json={
|
||||
"query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION,
|
||||
"variables": {
|
||||
"input": {
|
||||
"key": mnemonic_key,
|
||||
"deviceName": device_name,
|
||||
}
|
||||
},
|
||||
},
|
||||
{
|
||||
"token": "TEST_TOKEN2",
|
||||
"name": "test_token2",
|
||||
"date": "2022-01-14 08:31:10.789314",
|
||||
},
|
||||
]
|
||||
}
|
||||
)
|
||||
|
||||
API_DEVICES_QUERY = """
|
||||
devices {
|
||||
creationDate
|
||||
isCaller
|
||||
name
|
||||
}
|
||||
"""
|
||||
|
||||
def graphql_authorize_new_device(client, mnemonic_key, device_name) -> str:
|
||||
response = graphql_try_auth_new_device(client, mnemonic_key, "new_device")
|
||||
assert_ok(response, "authorizeWithNewDeviceApiKey")
|
||||
token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"]
|
||||
assert_token_valid(client, token)
|
||||
|
||||
|
||||
def test_graphql_tokens_info(authorized_client, tokens_file):
|
||||
response = authorized_client.post(
|
||||
"/graphql",
|
||||
json={"query": generate_api_query([API_DEVICES_QUERY])},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json().get("data") is not None
|
||||
assert response.json()["data"]["api"]["devices"] is not None
|
||||
assert len(response.json()["data"]["api"]["devices"]) == 2
|
||||
assert (
|
||||
response.json()["data"]["api"]["devices"][0]["creationDate"]
|
||||
== "2022-01-14T08:31:10.789314"
|
||||
)
|
||||
assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True
|
||||
assert response.json()["data"]["api"]["devices"][0]["name"] == "test_token"
|
||||
assert (
|
||||
response.json()["data"]["api"]["devices"][1]["creationDate"]
|
||||
== "2022-01-14T08:31:10.789314"
|
||||
)
|
||||
assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False
|
||||
assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2"
|
||||
assert_original(authorized_client)
|
||||
|
||||
|
||||
def test_graphql_tokens_info_unauthorized(client, tokens_file):
|
||||
response = client.post(
|
||||
"/graphql",
|
||||
json={"query": generate_api_query([API_DEVICES_QUERY])},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json()["data"] is None
|
||||
response = request_devices(client)
|
||||
assert_empty(response)
|
||||
|
||||
|
||||
DELETE_TOKEN_MUTATION = """
|
||||
|
@ -83,34 +95,27 @@ def test_graphql_delete_token_unauthorized(client, tokens_file):
|
|||
},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json()["data"] is None
|
||||
assert_empty(response)
|
||||
|
||||
|
||||
def test_graphql_delete_token(authorized_client, tokens_file):
|
||||
test_devices = ORIGINAL_DEVICES.copy()
|
||||
device_to_delete = test_devices.pop(1)
|
||||
assert device_to_delete != DEVICE_WE_AUTH_TESTS_WITH
|
||||
|
||||
response = authorized_client.post(
|
||||
"/graphql",
|
||||
json={
|
||||
"query": DELETE_TOKEN_MUTATION,
|
||||
"variables": {
|
||||
"device": "test_token2",
|
||||
"device": device_to_delete["name"],
|
||||
},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json().get("data") is not None
|
||||
assert response.json()["data"]["deleteDeviceApiToken"]["success"] is True
|
||||
assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None
|
||||
assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 200
|
||||
assert read_json(tokens_file) == {
|
||||
"tokens": [
|
||||
{
|
||||
"token": "TEST_TOKEN",
|
||||
"name": "test_token",
|
||||
"date": "2022-01-14 08:31:10.789314",
|
||||
}
|
||||
]
|
||||
}
|
||||
assert_ok(response, "deleteDeviceApiToken")
|
||||
|
||||
devices = graphql_get_devices(authorized_client)
|
||||
assert_same(devices, test_devices)
|
||||
|
||||
|
||||
def test_graphql_delete_self_token(authorized_client, tokens_file):
|
||||
|
@ -119,16 +124,12 @@ def test_graphql_delete_self_token(authorized_client, tokens_file):
|
|||
json={
|
||||
"query": DELETE_TOKEN_MUTATION,
|
||||
"variables": {
|
||||
"device": "test_token",
|
||||
"device": DEVICE_WE_AUTH_TESTS_WITH["name"],
|
||||
},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json().get("data") is not None
|
||||
assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False
|
||||
assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None
|
||||
assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 400
|
||||
assert read_json(tokens_file) == TOKENS_FILE_CONTETS
|
||||
assert_errorcode(response, "deleteDeviceApiToken", 400)
|
||||
assert_original(authorized_client)
|
||||
|
||||
|
||||
def test_graphql_delete_nonexistent_token(authorized_client, tokens_file):
|
||||
|
@ -141,12 +142,9 @@ def test_graphql_delete_nonexistent_token(authorized_client, tokens_file):
|
|||
},
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json().get("data") is not None
|
||||
assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False
|
||||
assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None
|
||||
assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 404
|
||||
assert read_json(tokens_file) == TOKENS_FILE_CONTETS
|
||||
assert_errorcode(response, "deleteDeviceApiToken", 404)
|
||||
|
||||
assert_original(authorized_client)
|
||||
|
||||
|
||||
REFRESH_TOKEN_MUTATION = """
|
||||
|
@ -166,25 +164,22 @@ def test_graphql_refresh_token_unauthorized(client, tokens_file):
|
|||
"/graphql",
|
||||
json={"query": REFRESH_TOKEN_MUTATION},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json()["data"] is None
|
||||
assert_empty(response)
|
||||
|
||||
|
||||
def test_graphql_refresh_token(authorized_client, tokens_file):
|
||||
def test_graphql_refresh_token(authorized_client, client, tokens_file):
|
||||
caller_name_and_date = graphql_get_caller_token_info(authorized_client)
|
||||
response = authorized_client.post(
|
||||
"/graphql",
|
||||
json={"query": REFRESH_TOKEN_MUTATION},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json().get("data") is not None
|
||||
assert response.json()["data"]["refreshDeviceApiToken"]["success"] is True
|
||||
assert response.json()["data"]["refreshDeviceApiToken"]["message"] is not None
|
||||
assert response.json()["data"]["refreshDeviceApiToken"]["code"] == 200
|
||||
assert read_json(tokens_file)["tokens"][0] == {
|
||||
"token": response.json()["data"]["refreshDeviceApiToken"]["token"],
|
||||
"name": "test_token",
|
||||
"date": "2022-01-14 08:31:10.789314",
|
||||
}
|
||||
assert_ok(response, "refreshDeviceApiToken")
|
||||
|
||||
new_token = response.json()["data"]["refreshDeviceApiToken"]["token"]
|
||||
assert_token_valid(client, new_token)
|
||||
|
||||
set_client_token(client, new_token)
|
||||
assert graphql_get_caller_token_info(client) == caller_name_and_date
|
||||
|
||||
|
||||
NEW_DEVICE_KEY_MUTATION = """
|
||||
|
@ -204,29 +199,7 @@ def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file):
|
|||
"/graphql",
|
||||
json={"query": NEW_DEVICE_KEY_MUTATION},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json()["data"] is None
|
||||
|
||||
|
||||
def test_graphql_get_new_device_auth_key(authorized_client, tokens_file):
|
||||
response = authorized_client.post(
|
||||
"/graphql",
|
||||
json={"query": NEW_DEVICE_KEY_MUTATION},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
assert response.json().get("data") is not None
|
||||
assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True
|
||||
assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None
|
||||
assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200
|
||||
assert (
|
||||
response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12
|
||||
)
|
||||
token = (
|
||||
Mnemonic(language="english")
|
||||
.to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"])
|
||||
.hex()
|
||||
)
|
||||
assert read_json(tokens_file)["new_device"]["token"] == token
|
||||
assert_empty(response)
|
||||


INVALIDATE_NEW_DEVICE_KEY_MUTATION = """

@@ -250,39 +223,20 @@ def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file):
            },
        },
    )
    assert response.status_code == 200
    assert response.json()["data"] is None
    assert_empty(response)


def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file):
    response = authorized_client.post(
        "/graphql",
        json={"query": NEW_DEVICE_KEY_MUTATION},
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True
    assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None
    assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200
    assert (
        response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12
    )
    token = (
        Mnemonic(language="english")
        .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"])
        .hex()
    )
    assert read_json(tokens_file)["new_device"]["token"] == token
def test_graphql_get_and_delete_new_device_key(client, authorized_client, tokens_file):
    mnemonic_key = graphql_get_new_device_key(authorized_client)

    response = authorized_client.post(
        "/graphql",
        json={"query": INVALIDATE_NEW_DEVICE_KEY_MUTATION},
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["invalidateNewDeviceApiKey"]["success"] is True
    assert response.json()["data"]["invalidateNewDeviceApiKey"]["message"] is not None
    assert response.json()["data"]["invalidateNewDeviceApiKey"]["code"] == 200
    assert read_json(tokens_file) == TOKENS_FILE_CONTETS
    assert_ok(response, "invalidateNewDeviceApiKey")

    response = graphql_try_auth_new_device(client, mnemonic_key, "new_device")
    assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404)


AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """

@@ -298,174 +252,45 @@ mutation AuthorizeWithNewDeviceKey($input: UseNewDeviceKeyInput!) {


def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_file):
    response = authorized_client.post(
        "/graphql",
        json={"query": NEW_DEVICE_KEY_MUTATION},
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True
    assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None
    assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200
    mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"]
    assert mnemonic_key.split(" ").__len__() == 12
    key = Mnemonic(language="english").to_entropy(mnemonic_key).hex()
    assert read_json(tokens_file)["new_device"]["token"] == key
    response = client.post(
        "/graphql",
        json={
            "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION,
            "variables": {
                "input": {
                    "key": mnemonic_key,
                    "deviceName": "new_device",
                }
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True
    assert (
        response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None
    )
    assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200
    token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"]
    assert read_json(tokens_file)["tokens"][2]["token"] == token
    assert read_json(tokens_file)["tokens"][2]["name"] == "new_device"
    mnemonic_key = graphql_get_new_device_key(authorized_client)
    old_devices = graphql_get_devices(authorized_client)

    graphql_authorize_new_device(client, mnemonic_key, "new_device")
    new_devices = graphql_get_devices(authorized_client)

    assert len(new_devices) == len(old_devices) + 1
    assert "new_device" in [device["name"] for device in new_devices]


def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file):
    response = client.post(
        "/graphql",
        json={
            "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION,
            "variables": {
                "input": {
                    "key": "invalid_token",
                    "deviceName": "test_token",
                }
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False
    assert (
        response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None
    )
    assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404
    assert read_json(tokens_file) == TOKENS_FILE_CONTETS
def test_graphql_authorize_new_device_with_invalid_key(
    client, authorized_client, tokens_file
):
    response = graphql_try_auth_new_device(client, "invalid_token", "new_device")
    assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404)

    assert_original(authorized_client)


def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_file):
    response = authorized_client.post(
        "/graphql",
        json={"query": NEW_DEVICE_KEY_MUTATION},
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True
    assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None
    assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200
    mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"]
    assert mnemonic_key.split(" ").__len__() == 12
    key = Mnemonic(language="english").to_entropy(mnemonic_key).hex()
    assert read_json(tokens_file)["new_device"]["token"] == key
    response = client.post(
        "/graphql",
        json={
            "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION,
            "variables": {
                "input": {
                    "key": mnemonic_key,
                    "deviceName": "new_token",
                }
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True
    assert (
        response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None
    )
    assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200
    assert (
        read_json(tokens_file)["tokens"][2]["token"]
        == response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"]
    )
    assert read_json(tokens_file)["tokens"][2]["name"] == "new_token"
    mnemonic_key = graphql_get_new_device_key(authorized_client)

    response = client.post(
        "/graphql",
        json={
            "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION,
            "variables": {
                "input": {
                    "key": mnemonic_key,
                    "deviceName": "test_token2",
                }
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False
    assert (
        response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None
    )
    assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404
    assert read_json(tokens_file)["tokens"].__len__() == 3
    graphql_authorize_new_device(client, mnemonic_key, "new_device")
    devices = graphql_get_devices(authorized_client)

    response = graphql_try_auth_new_device(client, mnemonic_key, "new_device2")
    assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404)

    assert graphql_get_devices(authorized_client) == devices


def test_graphql_get_and_authorize_key_after_12_minutes(
    client, authorized_client, tokens_file
    client, authorized_client, tokens_file, mocker
):
    response = authorized_client.post(
        "/graphql",
        json={"query": NEW_DEVICE_KEY_MUTATION},
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True
    assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None
    assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200
    assert (
        response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12
    )
    key = (
        Mnemonic(language="english")
        .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"])
        .hex()
    )
    assert read_json(tokens_file)["new_device"]["token"] == key
    mnemonic_key = graphql_get_new_device_key(authorized_client)
    mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, NearFuture)

    file_data = read_json(tokens_file)
    file_data["new_device"]["expiration"] = str(
        datetime.datetime.now() - datetime.timedelta(minutes=13)
    )
    write_json(tokens_file, file_data)

    response = client.post(
        "/graphql",
        json={
            "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION,
            "variables": {
                "input": {
                    "key": key,
                    "deviceName": "test_token",
                }
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False
    assert (
        response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None
    )
    assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404
    response = graphql_try_auth_new_device(client, mnemonic_key, "new_device")
    assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404)


def test_graphql_authorize_without_token(client, tokens_file):

@@ -480,5 +305,4 @@ def test_graphql_authorize_without_token(client, tokens_file):
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)
@@ -1,24 +1,28 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=missing-function-docstring
import datetime

from tests.common import generate_api_query, mnemonic_to_hex, read_json, write_json
from tests.common import (
    generate_api_query,
    assert_recovery_recent,
    NearFuture,
    RECOVERY_KEY_VALIDATION_DATETIME,
)

TOKENS_FILE_CONTETS = {
    "tokens": [
        {
            "token": "TEST_TOKEN",
            "name": "test_token",
            "date": "2022-01-14 08:31:10.789314",
        },
        {
            "token": "TEST_TOKEN2",
            "name": "test_token2",
            "date": "2022-01-14 08:31:10.789314",
        },
    ]
}
# The GraphQL API's output should be timezone-naive
from tests.common import FIVE_MINUTES_INTO_FUTURE_NAIVE as FIVE_MINUTES_INTO_FUTURE
from tests.common import FIVE_MINUTES_INTO_PAST_NAIVE as FIVE_MINUTES_INTO_PAST
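
# For orientation, a minimal illustration of the naive/aware split (assumed
# example values, not taken from the fixtures):
#     datetime.datetime(2022, 1, 1, 12, 0).isoformat()
#         -> "2022-01-01T12:00:00"            # naive, like the API output
#     datetime.datetime(2022, 1, 1, 12, 0, tzinfo=datetime.timezone.utc).isoformat()
#         -> "2022-01-01T12:00:00+00:00"      # aware, would break the comparisons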

from tests.test_graphql.common import (
    assert_empty,
    assert_data,
    assert_ok,
    assert_errorcode,
    assert_token_valid,
    assert_original,
    graphql_get_devices,
    set_client_token,
)

API_RECOVERY_QUERY = """
recoveryKey {

@@ -31,28 +35,85 @@ recoveryKey {
"""


def test_graphql_recovery_key_status_unauthorized(client, tokens_file):
    response = client.post(
def request_recovery_status(client):
    return client.post(
        "/graphql",
        json={"query": generate_api_query([API_RECOVERY_QUERY])},
    )
    assert response.status_code == 200
    assert response.json().get("data") is None


def graphql_recovery_status(client):
    response = request_recovery_status(client)
    data = assert_data(response)

    status = data["api"]["recoveryKey"]
    assert status is not None
    return status


def request_make_new_recovery_key(client, expires_at=None, uses=None):
    json = {"query": API_RECOVERY_KEY_GENERATE_MUTATION}
    limits = {}

    if expires_at is not None:
        limits["expirationDate"] = expires_at.isoformat()
    if uses is not None:
        limits["uses"] = uses

    if limits != {}:
        json["variables"] = {"limits": limits}

    response = client.post("/graphql", json=json)
    return response


def graphql_make_new_recovery_key(client, expires_at=None, uses=None):
    response = request_make_new_recovery_key(client, expires_at, uses)
    assert_ok(response, "getNewRecoveryApiKey")
    key = response.json()["data"]["getNewRecoveryApiKey"]["key"]
    assert key is not None
    assert key.split(" ").__len__() == 18
    return key


def request_recovery_auth(client, key, device_name):
    return client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_USE_MUTATION,
            "variables": {
                "input": {
                    "key": key,
                    "deviceName": device_name,
                },
            },
        },
    )


def graphql_use_recovery_key(client, key, device_name):
    response = request_recovery_auth(client, key, device_name)
    assert_ok(response, "useRecoveryApiKey")
    token = response.json()["data"]["useRecoveryApiKey"]["token"]
    assert token is not None
    assert_token_valid(client, token)
    set_client_token(client, token)
    assert device_name in [device["name"] for device in graphql_get_devices(client)]
    return token

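# Helper convention used above and throughout the rest of this file: request_*
# helpers return the raw HTTP response so that tests can also assert failures,
# while the graphql_* wrappers assert a successful result and return the
# unwrapped value.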

def test_graphql_recovery_key_status_unauthorized(client, tokens_file):
    response = request_recovery_status(client)
    assert_empty(response)


def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_file):
    response = authorized_client.post(
        "/graphql",
        json={"query": generate_api_query([API_RECOVERY_QUERY])},
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["api"]["recoveryKey"] is not None
    assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False
    assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False
    assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is None
    assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None
    assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None
    status = graphql_recovery_status(authorized_client)
    assert status["exists"] is False
    assert status["valid"] is False
    assert status["creationDate"] is None
    assert status["expirationDate"] is None
    assert status["usesLeft"] is None


API_RECOVERY_KEY_GENERATE_MUTATION = """

@@ -79,277 +140,71 @@ mutation TestUseRecoveryKey($input: UseRecoveryKeyInput!) {


def test_graphql_generate_recovery_key(client, authorized_client, tokens_file):
    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_GENERATE_MUTATION,
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True
    assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200
    assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None
    assert (
        response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__()
        == 18
    )
    assert read_json(tokens_file)["recovery_token"] is not None
    time_generated = read_json(tokens_file)["recovery_token"]["date"]
    assert time_generated is not None
    key = response.json()["data"]["getNewRecoveryApiKey"]["key"]
    assert (
        datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f")
        - datetime.timedelta(seconds=5)
        < datetime.datetime.now()
    )
    key = graphql_make_new_recovery_key(authorized_client)

    # Try to get token status
    response = authorized_client.post(
        "/graphql",
        json={"query": generate_api_query([API_RECOVERY_QUERY])},
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["api"]["recoveryKey"] is not None
    assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True
    assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True
    assert response.json()["data"]["api"]["recoveryKey"][
        "creationDate"
    ] == time_generated.replace("Z", "")
    assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None
    assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None
    status = graphql_recovery_status(authorized_client)
    assert status["exists"] is True
    assert status["valid"] is True
    assert_recovery_recent(status["creationDate"])
    assert status["expirationDate"] is None
    assert status["usesLeft"] is None

    # Try to use token
    response = client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_USE_MUTATION,
            "variables": {
                "input": {
                    "key": key,
                    "deviceName": "new_test_token",
                },
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["useRecoveryApiKey"]["success"] is True
    assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200
    assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None
    assert (
        response.json()["data"]["useRecoveryApiKey"]["token"]
        == read_json(tokens_file)["tokens"][2]["token"]
    )
    assert read_json(tokens_file)["tokens"][2]["name"] == "new_test_token"

    # Try to use token again
    response = client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_USE_MUTATION,
            "variables": {
                "input": {
                    "key": key,
                    "deviceName": "new_test_token2",
                },
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["useRecoveryApiKey"]["success"] is True
    assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200
    assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None
    assert (
        response.json()["data"]["useRecoveryApiKey"]["token"]
        == read_json(tokens_file)["tokens"][3]["token"]
    )
    assert read_json(tokens_file)["tokens"][3]["name"] == "new_test_token2"
    graphql_use_recovery_key(client, key, "new_test_token")
    # And again
    graphql_use_recovery_key(client, key, "new_test_token2")


def test_graphql_generate_recovery_key_with_expiration_date(
    client, authorized_client, tokens_file
):
    expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5)
    expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f")
    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_GENERATE_MUTATION,
            "variables": {
                "limits": {
                    "expirationDate": expiration_date_str,
                },
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True
    assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200
    assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None
    assert (
        response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__()
        == 18
    )
    assert read_json(tokens_file)["recovery_token"] is not None
    expiration_date = FIVE_MINUTES_INTO_FUTURE
    key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date)

    key = response.json()["data"]["getNewRecoveryApiKey"]["key"]
    assert read_json(tokens_file)["recovery_token"]["expiration"] == expiration_date_str
    assert read_json(tokens_file)["recovery_token"]["token"] == mnemonic_to_hex(key)
    status = graphql_recovery_status(authorized_client)
    assert status["exists"] is True
    assert status["valid"] is True
    assert_recovery_recent(status["creationDate"])
    assert status["expirationDate"] == expiration_date.isoformat()
    assert status["usesLeft"] is None

    time_generated = read_json(tokens_file)["recovery_token"]["date"]
    assert time_generated is not None
    assert (
        datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f")
        - datetime.timedelta(seconds=5)
        < datetime.datetime.now()
    )
    graphql_use_recovery_key(client, key, "new_test_token")
    # And again
    graphql_use_recovery_key(client, key, "new_test_token2")

    # Try to get token status
    response = authorized_client.post(
        "/graphql",
        json={"query": generate_api_query([API_RECOVERY_QUERY])},
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["api"]["recoveryKey"] is not None
    assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True
    assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True
    assert response.json()["data"]["api"]["recoveryKey"][
        "creationDate"
    ] == time_generated.replace("Z", "")
    assert (
        response.json()["data"]["api"]["recoveryKey"]["expirationDate"]
        == expiration_date_str
    )
    assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None

    # Try to use token
    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_USE_MUTATION,
            "variables": {
                "input": {
                    "key": key,
                    "deviceName": "new_test_token",
                },
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["useRecoveryApiKey"]["success"] is True
    assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200
    assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None
    assert (
        response.json()["data"]["useRecoveryApiKey"]["token"]
        == read_json(tokens_file)["tokens"][2]["token"]
    )
def test_graphql_use_recovery_key_after_expiration(
    client, authorized_client, tokens_file, mocker
):
    expiration_date = FIVE_MINUTES_INTO_FUTURE
    key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date)

    # Try to use token again
    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_USE_MUTATION,
            "variables": {
                "input": {
                    "key": key,
                    "deviceName": "new_test_token2",
                },
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["useRecoveryApiKey"]["success"] is True
    assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200
    assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None
    assert (
        response.json()["data"]["useRecoveryApiKey"]["token"]
        == read_json(tokens_file)["tokens"][3]["token"]
    )
    # Timewarp to after it expires
    mock = mocker.patch(RECOVERY_KEY_VALIDATION_DATETIME, NearFuture)

    # Try to use token after expiration date
    new_data = read_json(tokens_file)
    new_data["recovery_token"]["expiration"] = (
        datetime.datetime.now() - datetime.timedelta(minutes=5)
    ).strftime("%Y-%m-%dT%H:%M:%S.%f")
    write_json(tokens_file, new_data)
    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_USE_MUTATION,
            "variables": {
                "input": {
                    "key": key,
                    "deviceName": "new_test_token3",
                },
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["useRecoveryApiKey"]["success"] is False
    assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404
    response = request_recovery_auth(client, key, "new_test_token3")
    assert_errorcode(response, "useRecoveryApiKey", 404)
    assert response.json()["data"]["useRecoveryApiKey"]["token"] is None
    assert_original(authorized_client)

    assert read_json(tokens_file)["tokens"] == new_data["tokens"]

    # Try to get token status
    response = authorized_client.post(
        "/graphql",
        json={"query": generate_api_query([API_RECOVERY_QUERY])},
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["api"]["recoveryKey"] is not None
    assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True
    assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False
    assert (
        response.json()["data"]["api"]["recoveryKey"]["creationDate"] == time_generated
    )
    assert (
        response.json()["data"]["api"]["recoveryKey"]["expirationDate"]
        == new_data["recovery_token"]["expiration"]
    )
    assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None
    status = graphql_recovery_status(authorized_client)
    assert status["exists"] is True
    assert status["valid"] is False
    assert_recovery_recent(status["creationDate"])
    assert status["expirationDate"] == expiration_date.isoformat()
    assert status["usesLeft"] is None


def test_graphql_generate_recovery_key_with_expiration_in_the_past(
    authorized_client, tokens_file
):
    expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5)
    expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f")

    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_GENERATE_MUTATION,
            "variables": {
                "limits": {
                    "expirationDate": expiration_date_str,
                },
            },
        },
    expiration_date = FIVE_MINUTES_INTO_PAST
    response = request_make_new_recovery_key(
        authorized_client, expires_at=expiration_date
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False
    assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400

    assert_errorcode(response, "getNewRecoveryApiKey", 400)
    assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None
    assert "recovery_token" not in read_json(tokens_file)
    assert graphql_recovery_status(authorized_client)["exists"] is False


def test_graphql_generate_recovery_key_with_invalid_time_format(

@@ -369,183 +224,57 @@ def test_graphql_generate_recovery_key_with_invalid_time_format(
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None

    assert "recovery_token" not in read_json(tokens_file)
    assert_empty(response)
    assert graphql_recovery_status(authorized_client)["exists"] is False


def test_graphql_generate_recovery_key_with_limited_uses(
    authorized_client, tokens_file
    authorized_client, client, tokens_file
):

    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_GENERATE_MUTATION,
            "variables": {
                "limits": {
                    "expirationDate": None,
                    "uses": 2,
                },
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True
    assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200
    assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None
    mnemonic_key = graphql_make_new_recovery_key(authorized_client, uses=2)

    mnemonic_key = response.json()["data"]["getNewRecoveryApiKey"]["key"]
    key = mnemonic_to_hex(mnemonic_key)
    status = graphql_recovery_status(authorized_client)
    assert status["exists"] is True
    assert status["valid"] is True
    assert status["creationDate"] is not None
    assert status["expirationDate"] is None
    assert status["usesLeft"] == 2

    assert read_json(tokens_file)["recovery_token"]["token"] == key
    assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2
    graphql_use_recovery_key(client, mnemonic_key, "new_test_token1")

    # Try to get token status
    response = authorized_client.post(
        "/graphql",
        json={"query": generate_api_query([API_RECOVERY_QUERY])},
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["api"]["recoveryKey"] is not None
    assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True
    assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True
    assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None
    assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None
    assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 2
    status = graphql_recovery_status(authorized_client)
    assert status["exists"] is True
    assert status["valid"] is True
    assert status["creationDate"] is not None
    assert status["expirationDate"] is None
    assert status["usesLeft"] == 1

    # Try to use token
    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_USE_MUTATION,
            "variables": {
                "input": {
                    "key": mnemonic_key,
                    "deviceName": "test_token1",
                },
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["useRecoveryApiKey"]["success"] is True
    assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200
    assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None
    graphql_use_recovery_key(client, mnemonic_key, "new_test_token2")

    # Try to get token status
    response = authorized_client.post(
        "/graphql",
        json={"query": generate_api_query([API_RECOVERY_QUERY])},
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["api"]["recoveryKey"] is not None
    assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True
    assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True
    assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None
    assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None
    assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 1
    status = graphql_recovery_status(authorized_client)
    assert status["exists"] is True
    assert status["valid"] is False
    assert status["creationDate"] is not None
    assert status["expirationDate"] is None
    assert status["usesLeft"] == 0

    # Try to use token
    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_USE_MUTATION,
            "variables": {
                "input": {
                    "key": mnemonic_key,
                    "deviceName": "test_token2",
                },
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["useRecoveryApiKey"]["success"] is True
    assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200
    assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None

    # Try to get token status
    response = authorized_client.post(
        "/graphql",
        json={"query": generate_api_query([API_RECOVERY_QUERY])},
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["api"]["recoveryKey"] is not None
    assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True
    assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False
    assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None
    assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None
    assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 0

    # Try to use token
    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_USE_MUTATION,
            "variables": {
                "input": {
                    "key": mnemonic_key,
                    "deviceName": "test_token3",
                },
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["useRecoveryApiKey"]["success"] is False
    assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404
    assert response.json()["data"]["useRecoveryApiKey"]["token"] is None
    response = request_recovery_auth(client, mnemonic_key, "new_test_token3")
    assert_errorcode(response, "useRecoveryApiKey", 404)


def test_graphql_generate_recovery_key_with_negative_uses(
    authorized_client, tokens_file
):
    # Try to get token status
    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_GENERATE_MUTATION,
            "variables": {
                "limits": {
                    "uses": -1,
                },
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False
    assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400
    response = request_make_new_recovery_key(authorized_client, uses=-1)

    assert_errorcode(response, "getNewRecoveryApiKey", 400)
    assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None


def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file):
    # Try to get token status
    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_RECOVERY_KEY_GENERATE_MUTATION,
            "variables": {
                "limits": {
                    "uses": 0,
                },
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None
    assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False
    assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None
    assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400
    response = request_make_new_recovery_key(authorized_client, uses=0)

    assert_errorcode(response, "getNewRecoveryApiKey", 400)
    assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None
    assert graphql_recovery_status(authorized_client)["exists"] is False

@@ -0,0 +1,245 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=missing-function-docstring
"""
Tests that restrict the JSON token repository implementation.
"""

import pytest


from datetime import datetime

from selfprivacy_api.models.tokens.token import Token
from selfprivacy_api.repositories.tokens.exceptions import (
    TokenNotFound,
    RecoveryKeyNotFound,
    NewDeviceKeyNotFound,
)
from selfprivacy_api.repositories.tokens.json_tokens_repository import (
    JsonTokensRepository,
)

from tests.common import read_json
from test_tokens_repository import (
    mock_recovery_key_generate,
    mock_generate_token,
    mock_new_device_key_generate,
)

ORIGINAL_TOKEN_CONTENT = [
    {
        "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
        "name": "primary_token",
        "date": "2022-07-15 17:41:31.675698",
    },
    {
        "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
        "name": "second_token",
        "date": "2022-07-15 17:41:31.675698Z",
    },
    {
        "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
        "name": "third_token",
        "date": "2022-07-15T17:41:31.675698Z",
    },
    {
        "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
        "name": "forth_token",
        "date": "2022-07-15T17:41:31.675698",
    },
]

EMPTY_KEYS_JSON = """
{
    "tokens": [
        {
            "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
            "name": "primary_token",
            "date": "2022-07-15 17:41:31.675698"
        }
    ]
}
"""


@pytest.fixture
def tokens(mocker, datadir):
    mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json")
    assert read_json(datadir / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT
    return datadir
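
# Fixture pattern used in this file: point selfprivacy_api.utils.TOKENS_FILE at
# a per-test copy of the data via mocker.patch, then sanity-check that copy, so
# the byte-level comparisons against the on-disk JSON layout stay meaningful.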


@pytest.fixture
def empty_keys(mocker, tmpdir):
    tokens_file = tmpdir / "empty_keys.json"
    with open(tokens_file, "w") as file:
        file.write(EMPTY_KEYS_JSON)
    mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokens_file)
    assert read_json(tokens_file)["tokens"] == [
        {
            "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
            "name": "primary_token",
            "date": "2022-07-15 17:41:31.675698",
        }
    ]
    return tmpdir


@pytest.fixture
def null_keys(mocker, datadir):
    mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json")
    assert read_json(datadir / "null_keys.json")["recovery_token"] is None
    assert read_json(datadir / "null_keys.json")["new_device"] is None
    return datadir


def test_delete_token(tokens):
    repo = JsonTokensRepository()
    input_token = Token(
        token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
        device_name="primary_token",
        created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
    )

    repo.delete_token(input_token)
    assert read_json(tokens / "tokens.json")["tokens"] == [
        {
            "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
            "name": "second_token",
            "date": "2022-07-15 17:41:31.675698Z",
        },
        {
            "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
            "name": "third_token",
            "date": "2022-07-15T17:41:31.675698Z",
        },
        {
            "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
            "name": "forth_token",
            "date": "2022-07-15T17:41:31.675698",
        },
    ]


def test_delete_not_found_token(tokens):
    repo = JsonTokensRepository()
    input_token = Token(
        token="imbadtoken",
        device_name="primary_token",
        created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
    )
    with pytest.raises(TokenNotFound):
        assert repo.delete_token(input_token) is None

    assert read_json(tokens / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT


def test_create_recovery_key(tokens, mock_recovery_key_generate):
    repo = JsonTokensRepository()

    assert repo.create_recovery_key(uses_left=1, expiration=None) is not None
    assert read_json(tokens / "tokens.json")["recovery_token"] == {
        "token": "889bf49c1d3199d71a2e704718772bd53a422020334db051",
        "date": "2022-07-15T17:41:31.675698",
        "expiration": None,
        "uses_left": 1,
    }


def test_use_mnemonic_recovery_key_when_null(null_keys):
    repo = JsonTokensRepository()

    with pytest.raises(RecoveryKeyNotFound):
        assert (
            repo.use_mnemonic_recovery_key(
                mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb",
                device_name="primary_token",
            )
            is None
        )


def test_use_mnemonic_recovery_key(tokens, mock_generate_token):
    repo = JsonTokensRepository()

    assert repo.use_mnemonic_recovery_key(
        mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park",
        device_name="newdevice",
    ) == Token(
        token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4",
        device_name="newdevice",
        created_at=datetime(2022, 11, 14, 6, 6, 32, 777123),
    )

    assert read_json(tokens / "tokens.json")["tokens"] == [
        {
            "date": "2022-07-15 17:41:31.675698",
            "name": "primary_token",
            "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
        },
        {
            "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
            "name": "second_token",
            "date": "2022-07-15 17:41:31.675698Z",
        },
        {
            "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
            "name": "third_token",
            "date": "2022-07-15T17:41:31.675698Z",
        },
        {
            "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
            "name": "forth_token",
            "date": "2022-07-15T17:41:31.675698",
        },
        {
            "date": "2022-11-14T06:06:32.777123",
            "name": "newdevice",
            "token": "ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4",
        },
    ]
    assert read_json(tokens / "tokens.json")["recovery_token"] == {
        "date": "2022-11-11T11:48:54.228038",
        "expiration": None,
        "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54",
        "uses_left": 1,
    }


def test_get_new_device_key(tokens, mock_new_device_key_generate):
    repo = JsonTokensRepository()

    assert repo.get_new_device_key() is not None
    assert read_json(tokens / "tokens.json")["new_device"] == {
        "date": "2022-07-15T17:41:31.675698",
        "expiration": "2022-07-15T17:41:31.675698",
        "token": "43478d05b35e4781598acd76e33832bb",
    }


def test_delete_new_device_key(tokens):
    repo = JsonTokensRepository()

    assert repo.delete_new_device_key() is None
    assert "new_device" not in read_json(tokens / "tokens.json")


def test_delete_new_device_key_when_empty(empty_keys):
    repo = JsonTokensRepository()

    repo.delete_new_device_key()
    assert "new_device" not in read_json(empty_keys / "empty_keys.json")


def test_use_mnemonic_new_device_key_when_null(null_keys):
    repo = JsonTokensRepository()

    with pytest.raises(NewDeviceKeyNotFound):
        assert (
            repo.use_mnemonic_new_device_key(
                device_name="imnew",
                mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb",
            )
            is None
        )

@@ -0,0 +1,26 @@
{
    "tokens": [
        {
            "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
            "name": "primary_token",
            "date": "2022-07-15 17:41:31.675698"
        },
        {
            "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
            "name": "second_token",
            "date": "2022-07-15 17:41:31.675698Z"
        },
        {
            "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
            "name": "third_token",
            "date": "2022-07-15T17:41:31.675698Z"
        },
        {
            "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
            "name": "forth_token",
            "date": "2022-07-15T17:41:31.675698"
        }
    ],
    "recovery_token": null,
    "new_device": null
}

@@ -0,0 +1,35 @@
{
    "tokens": [
        {
            "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
            "name": "primary_token",
            "date": "2022-07-15 17:41:31.675698"
        },
        {
            "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
            "name": "second_token",
            "date": "2022-07-15 17:41:31.675698Z"
        },
        {
            "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
            "name": "third_token",
            "date": "2022-07-15T17:41:31.675698Z"
        },
        {
            "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
            "name": "forth_token",
            "date": "2022-07-15T17:41:31.675698"
        }
    ],
    "recovery_token": {
        "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54",
        "date": "2022-11-11T11:48:54.228038",
        "expiration": null,
        "uses_left": 2
    },
    "new_device": {
        "token": "2237238de23dc71ab558e317bdb8ff8e",
        "date": "2022-10-26 20:50:47.973212",
        "expiration": "2022-10-26 21:00:47.974153"
    }
}

@@ -0,0 +1,605 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=missing-function-docstring

from datetime import datetime, timezone
from mnemonic import Mnemonic

import pytest

from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey
from selfprivacy_api.models.tokens.recovery_key import RecoveryKey
from selfprivacy_api.models.tokens.token import Token
from selfprivacy_api.repositories.tokens.exceptions import (
    InvalidMnemonic,
    RecoveryKeyNotFound,
    TokenNotFound,
    NewDeviceKeyNotFound,
)

from selfprivacy_api.repositories.tokens.json_tokens_repository import (
    JsonTokensRepository,
)
from selfprivacy_api.repositories.tokens.redis_tokens_repository import (
    RedisTokensRepository,
)
from selfprivacy_api.repositories.tokens.abstract_tokens_repository import (
    AbstractTokensRepository,
)

from tests.common import FIVE_MINUTES_INTO_PAST, FIVE_MINUTES_INTO_FUTURE


ORIGINAL_DEVICE_NAMES = [
    "primary_token",
    "second_token",
    "third_token",
    "forth_token",
]

TEST_DATE = datetime(2022, 7, 15, 17, 41, 31, 675698, timezone.utc)
# tokens are not tz-aware
TOKEN_TEST_DATE = datetime(2022, 7, 15, 17, 41, 31, 675698)


def mnemonic_from_hex(hexkey):
    return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey))


@pytest.fixture
def mock_new_device_key_generate(mocker):
    mock = mocker.patch(
        "selfprivacy_api.models.tokens.new_device_key.NewDeviceKey.generate",
        autospec=True,
        return_value=NewDeviceKey(
            key="43478d05b35e4781598acd76e33832bb",
            created_at=TEST_DATE,
            expires_at=TEST_DATE,
        ),
    )
    return mock


# mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb",
@pytest.fixture
def mock_new_device_key_generate_for_mnemonic(mocker):
    mock = mocker.patch(
        "selfprivacy_api.models.tokens.new_device_key.NewDeviceKey.generate",
        autospec=True,
        return_value=NewDeviceKey(
            key="2237238de23dc71ab558e317bdb8ff8e",
            created_at=TEST_DATE,
            expires_at=TEST_DATE,
        ),
    )
    return mock


@pytest.fixture
def mock_generate_token(mocker):
    mock = mocker.patch(
        "selfprivacy_api.models.tokens.token.Token.generate",
        autospec=True,
        return_value=Token(
            token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4",
            device_name="newdevice",
            created_at=datetime(2022, 11, 14, 6, 6, 32, 777123),
        ),
    )
    return mock


@pytest.fixture
def mock_recovery_key_generate_invalid(mocker):
    mock = mocker.patch(
        "selfprivacy_api.models.tokens.recovery_key.RecoveryKey.generate",
        autospec=True,
        return_value=RecoveryKey(
            key="889bf49c1d3199d71a2e704718772bd53a422020334db051",
            created_at=TEST_DATE,
            expires_at=None,
            uses_left=0,
        ),
    )
    return mock


@pytest.fixture
def mock_token_generate(mocker):
    mock = mocker.patch(
        "selfprivacy_api.models.tokens.token.Token.generate",
        autospec=True,
        return_value=Token(
            token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM",
            device_name="IamNewDevice",
            created_at=TOKEN_TEST_DATE,
        ),
    )
    return mock


@pytest.fixture
def mock_recovery_key_generate(mocker):
    mock = mocker.patch(
        "selfprivacy_api.models.tokens.recovery_key.RecoveryKey.generate",
        autospec=True,
        return_value=RecoveryKey(
            key="889bf49c1d3199d71a2e704718772bd53a422020334db051",
            created_at=TEST_DATE,
            expires_at=None,
            uses_left=1,
        ),
    )
    return mock


@pytest.fixture(params=["json", "redis"])
def empty_repo(request, empty_json_repo, empty_redis_repo):
    if request.param == "json":
        return empty_json_repo
    if request.param == "redis":
        return empty_redis_repo
    else:
        raise NotImplementedError
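
# Because of params=["json", "redis"] above, pytest builds empty_repo once per
# backend, so every test that takes empty_repo (or some_tokens_repo below,
# which is derived from it) runs against both JsonTokensRepository and
# RedisTokensRepository.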


@pytest.fixture
def some_tokens_repo(empty_repo):
    for name in ORIGINAL_DEVICE_NAMES:
        empty_repo.create_token(name)
    assert len(empty_repo.get_tokens()) == len(ORIGINAL_DEVICE_NAMES)
    for name in ORIGINAL_DEVICE_NAMES:
        assert empty_repo.get_token_by_name(name) is not None
    assert empty_repo.get_new_device_key() is not None
    return empty_repo


###############
# Test tokens #
###############


def test_get_token_by_token_string(some_tokens_repo):
    repo = some_tokens_repo
    test_token = repo.get_tokens()[2]

    assert repo.get_token_by_token_string(token_string=test_token.token) == test_token


def test_get_token_by_non_existent_token_string(some_tokens_repo):
    repo = some_tokens_repo

    with pytest.raises(TokenNotFound):
        assert repo.get_token_by_token_string(token_string="iamBadtoken") is None


def test_get_token_by_name(some_tokens_repo):
    repo = some_tokens_repo

    token = repo.get_token_by_name(token_name="primary_token")
    assert token is not None
    assert token.device_name == "primary_token"
    assert token in repo.get_tokens()


def test_get_token_by_non_existent_name(some_tokens_repo):
    repo = some_tokens_repo

    with pytest.raises(TokenNotFound):
        assert repo.get_token_by_name(token_name="badname") is None


def test_is_token_valid(some_tokens_repo):
    repo = some_tokens_repo
    token = repo.get_tokens()[0]
    assert repo.is_token_valid(token.token)
    assert not repo.is_token_valid("gibberish")


def test_is_token_name_pair_valid(some_tokens_repo):
    repo = some_tokens_repo
    token = repo.get_tokens()[0]
    assert repo.is_token_name_pair_valid(token.device_name, token.token)
    assert not repo.is_token_name_pair_valid(token.device_name, "gibberish")
    assert not repo.is_token_name_pair_valid("gibberish", token.token)


def test_is_token_name_exists(some_tokens_repo):
    repo = some_tokens_repo
    token = repo.get_tokens()[0]
    assert repo.is_token_name_exists(token.device_name)
    assert not repo.is_token_name_exists("gibberish")


def test_get_tokens(some_tokens_repo):
    repo = some_tokens_repo
    tokenstrings = []
    # we cannot insert tokens directly via api, so we check meta-properties instead
    for token in repo.get_tokens():
        assert len(token.token) == 43  # assuming secrets.token_urlsafe
        assert token.token not in tokenstrings
        tokenstrings.append(token.token)
        assert token.created_at.day == datetime.today().day
|
||||
|
||||
def test_create_token(empty_repo, mock_token_generate):
|
||||
repo = empty_repo
|
||||
|
||||
assert repo.create_token(device_name="IamNewDevice") == Token(
|
||||
token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM",
|
||||
device_name="IamNewDevice",
|
||||
created_at=TOKEN_TEST_DATE,
|
||||
)
|
||||
assert repo.get_tokens() == [
|
||||
Token(
|
||||
token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM",
|
||||
device_name="IamNewDevice",
|
||||
created_at=TOKEN_TEST_DATE,
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def test_create_token_existing(some_tokens_repo):
|
||||
repo = some_tokens_repo
|
||||
old_token = repo.get_tokens()[0]
|
||||
|
||||
new_token = repo.create_token(device_name=old_token.device_name)
|
||||
assert new_token.device_name != old_token.device_name
|
||||
|
||||
assert old_token in repo.get_tokens()
|
||||
assert new_token in repo.get_tokens()
|
||||
|
||||
|
||||
def test_delete_token(some_tokens_repo):
|
||||
repo = some_tokens_repo
|
||||
original_tokens = repo.get_tokens()
|
||||
input_token = original_tokens[1]
|
||||
|
||||
repo.delete_token(input_token)
|
||||
|
||||
tokens_after_delete = repo.get_tokens()
|
||||
for token in original_tokens:
|
||||
if token != input_token:
|
||||
assert token in tokens_after_delete
|
||||
assert len(original_tokens) == len(tokens_after_delete) + 1
|
||||
|
||||
|
||||
def test_delete_not_found_token(some_tokens_repo):
|
||||
repo = some_tokens_repo
|
||||
initial_tokens = repo.get_tokens()
|
||||
input_token = Token(
|
||||
token="imbadtoken",
|
||||
device_name="primary_token",
|
||||
created_at=TEST_DATE,
|
||||
)
|
||||
with pytest.raises(TokenNotFound):
|
||||
assert repo.delete_token(input_token) is None
|
||||
|
||||
new_tokens = repo.get_tokens()
|
||||
assert len(new_tokens) == len(initial_tokens)
|
||||
for token in initial_tokens:
|
||||
assert token in new_tokens
|
||||
|
||||
|
||||
def test_refresh_token(some_tokens_repo):
|
||||
repo = some_tokens_repo
|
||||
input_token = some_tokens_repo.get_tokens()[0]
|
||||
|
||||
output_token = repo.refresh_token(input_token)
|
||||
|
||||
assert output_token.token != input_token.token
|
||||
assert output_token.device_name == input_token.device_name
|
||||
assert output_token.created_at == input_token.created_at
|
||||
|
||||
assert output_token in repo.get_tokens()
|
||||
|
||||
|
||||
def test_refresh_not_found_token(some_tokens_repo, mock_token_generate):
|
||||
repo = some_tokens_repo
|
||||
input_token = Token(
|
||||
token="idontknowwhoiam",
|
||||
device_name="tellmewhoiam?",
|
||||
created_at=TEST_DATE,
|
||||
)
|
||||
|
||||
with pytest.raises(TokenNotFound):
|
||||
assert repo.refresh_token(input_token) is None
|
||||
|
||||
|
||||
################
|
||||
# Recovery key #
|
||||
################
|
||||
|
||||
|
||||
def test_get_recovery_key_when_empty(empty_repo):
|
||||
repo = empty_repo
|
||||
|
||||
assert repo.get_recovery_key() is None
|
||||
|
||||
|
||||
def test_create_get_recovery_key(some_tokens_repo, mock_recovery_key_generate):
|
||||
repo = some_tokens_repo
|
||||
|
||||
assert repo.create_recovery_key(uses_left=1, expiration=None) is not None
|
||||
assert repo.get_recovery_key() == RecoveryKey(
|
||||
key="889bf49c1d3199d71a2e704718772bd53a422020334db051",
|
||||
created_at=TEST_DATE,
|
||||
expires_at=None,
|
||||
uses_left=1,
|
||||
)


def test_use_mnemonic_recovery_key_when_empty(empty_repo):
    repo = empty_repo

    with pytest.raises(RecoveryKeyNotFound):
        assert (
            repo.use_mnemonic_recovery_key(
                mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb",
                device_name="primary_token",
            )
            is None
        )


def test_use_mnemonic_not_valid_recovery_key(
    some_tokens_repo, mock_recovery_key_generate_invalid
):
    repo = some_tokens_repo
    assert repo.create_recovery_key(uses_left=0, expiration=None) is not None

    with pytest.raises(RecoveryKeyNotFound):
        assert (
            repo.use_mnemonic_recovery_key(
                mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb",
                device_name="primary_token",
            )
            is None
        )


def test_use_mnemonic_expired_recovery_key(
    some_tokens_repo,
):
    repo = some_tokens_repo
    expiration = FIVE_MINUTES_INTO_PAST
    assert repo.create_recovery_key(uses_left=2, expiration=expiration) is not None
    recovery_key = repo.get_recovery_key()
    # TODO: do not ignore timezone once json backend is deleted
    assert recovery_key.expires_at.replace(tzinfo=None) == expiration.replace(
        tzinfo=None
    )
    assert not repo.is_recovery_key_valid()

    with pytest.raises(RecoveryKeyNotFound):
        token = repo.use_mnemonic_recovery_key(
            mnemonic_phrase=mnemonic_from_hex(recovery_key.key),
            device_name="newdevice",
        )


def test_use_mnemonic_not_mnemonic_recovery_key(some_tokens_repo):
    repo = some_tokens_repo
    assert repo.create_recovery_key(uses_left=1, expiration=None) is not None

    with pytest.raises(InvalidMnemonic):
        assert (
            repo.use_mnemonic_recovery_key(
                mnemonic_phrase="sorry, it was joke",
                device_name="primary_token",
            )
            is None
        )


def test_use_not_mnemonic_recovery_key(some_tokens_repo):
    repo = some_tokens_repo
    assert repo.create_recovery_key(uses_left=1, expiration=None) is not None

    with pytest.raises(InvalidMnemonic):
        assert (
            repo.use_mnemonic_recovery_key(
                mnemonic_phrase="please come back",
                device_name="primary_token",
            )
            is None
        )


def test_use_not_found_mnemonic_recovery_key(some_tokens_repo):
    repo = some_tokens_repo
    assert repo.create_recovery_key(uses_left=1, expiration=None) is not None

    with pytest.raises(RecoveryKeyNotFound):
        assert (
            repo.use_mnemonic_recovery_key(
                mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb",
                device_name="primary_token",
            )
            is None
        )


@pytest.fixture(params=["recovery_uses_1", "recovery_eternal"])
def recovery_key_uses_left(request):
    if request.param == "recovery_uses_1":
        return 1
    if request.param == "recovery_eternal":
        return None


def test_use_mnemonic_recovery_key(some_tokens_repo, recovery_key_uses_left):
    repo = some_tokens_repo
    assert (
        repo.create_recovery_key(uses_left=recovery_key_uses_left, expiration=None)
        is not None
    )
    assert repo.is_recovery_key_valid()
    recovery_key = repo.get_recovery_key()

    token = repo.use_mnemonic_recovery_key(
        mnemonic_phrase=mnemonic_from_hex(recovery_key.key),
        device_name="newdevice",
    )

    assert token.device_name == "newdevice"
    assert token in repo.get_tokens()
    new_uses = None
    if recovery_key_uses_left is not None:
        new_uses = recovery_key_uses_left - 1
    assert repo.get_recovery_key() == RecoveryKey(
        key=recovery_key.key,
        created_at=recovery_key.created_at,
        expires_at=None,
        uses_left=new_uses,
    )


##################
# New device key #
##################


def test_get_new_device_key(some_tokens_repo, mock_new_device_key_generate):
    repo = some_tokens_repo

    assert repo.get_new_device_key() == NewDeviceKey(
        key="43478d05b35e4781598acd76e33832bb",
        created_at=TEST_DATE,
        expires_at=TEST_DATE,
    )


def test_delete_new_device_key(some_tokens_repo):
    repo = some_tokens_repo

    assert repo.delete_new_device_key() is None
    # we cannot say if there is one or not without creating it


def test_delete_new_device_key_when_empty(empty_repo):
    repo = empty_repo

    assert repo.delete_new_device_key() is None


def test_use_invalid_mnemonic_new_device_key(some_tokens_repo):
    repo = some_tokens_repo

    with pytest.raises(InvalidMnemonic):
        assert (
            repo.use_mnemonic_new_device_key(
                device_name="imnew",
                mnemonic_phrase="oh-no",
            )
            is None
        )


def test_use_not_exists_mnemonic_new_device_key(
    empty_repo, mock_new_device_key_generate
):
    repo = empty_repo
    assert repo.get_new_device_key() is not None

    with pytest.raises(NewDeviceKeyNotFound):
        assert (
            repo.use_mnemonic_new_device_key(
                device_name="imnew",
                mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park",
            )
            is None
        )


def test_use_mnemonic_new_device_key(empty_repo):
    repo = empty_repo
    key = repo.get_new_device_key()
    assert key is not None

    mnemonic_phrase = mnemonic_from_hex(key.key)

    new_token = repo.use_mnemonic_new_device_key(
        device_name="imnew",
        mnemonic_phrase=mnemonic_phrase,
    )

    assert new_token.device_name == "imnew"
    assert new_token in repo.get_tokens()

    # we must delete the key after use
    with pytest.raises(NewDeviceKeyNotFound):
        assert (
            repo.use_mnemonic_new_device_key(
                device_name="imnew",
                mnemonic_phrase=mnemonic_phrase,
            )
            is None
        )


def test_use_mnemonic_expired_new_device_key(
    some_tokens_repo,
):
    repo = some_tokens_repo
    expiration = FIVE_MINUTES_INTO_PAST

    key = repo.get_new_device_key()
    assert key is not None
    assert key.expires_at is not None
    key.expires_at = expiration
    assert not key.is_valid()
    repo._store_new_device_key(key)

    with pytest.raises(NewDeviceKeyNotFound):
        token = repo.use_mnemonic_new_device_key(
            mnemonic_phrase=mnemonic_from_hex(key.key),
            device_name="imnew",
        )


def test_use_mnemonic_new_device_key_when_empty(empty_repo):
    repo = empty_repo

    with pytest.raises(NewDeviceKeyNotFound):
        assert (
            repo.use_mnemonic_new_device_key(
                device_name="imnew",
                mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb",
            )
            is None
        )


def assert_identical(
    repo_a: AbstractTokensRepository, repo_b: AbstractTokensRepository
):
    tokens_a = repo_a.get_tokens()
    tokens_b = repo_b.get_tokens()
    assert len(tokens_a) == len(tokens_b)
    for token in tokens_a:
        assert token in tokens_b
    assert repo_a.get_recovery_key() == repo_b.get_recovery_key()
    assert repo_a._get_stored_new_device_key() == repo_b._get_stored_new_device_key()


def clone_to_redis(repo: JsonTokensRepository):
    other_repo = RedisTokensRepository()
    other_repo.clone(repo)
    assert_identical(repo, other_repo)


# we cannot easily parametrize this unfortunately, since some_tokens and empty_repo cannot coexist
def test_clone_json_to_redis_empty(empty_repo):
    repo = empty_repo
    if isinstance(repo, JsonTokensRepository):
        clone_to_redis(repo)


def test_clone_json_to_redis_full(some_tokens_repo):
    repo = some_tokens_repo
    if isinstance(repo, JsonTokensRepository):
        repo.get_new_device_key()
        repo.create_recovery_key(FIVE_MINUTES_INTO_FUTURE, 2)
        clone_to_redis(repo)
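

# Taken together with assert_identical above, these two clone tests pin down
# the migration contract: every token, the recovery key, and the stored
# new-device key must survive the JSON -> Redis copy unchanged.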

@@ -0,0 +1,26 @@
{
    "tokens": [
        {
            "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
            "name": "primary_token",
            "date": "2022-07-15 17:41:31.675698"
        },
        {
            "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
            "name": "second_token",
            "date": "2022-07-15 17:41:31.675698Z"
        },
        {
            "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
            "name": "third_token",
            "date": "2022-07-15T17:41:31.675698Z"
        },
        {
            "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
            "name": "forth_token",
            "date": "2022-07-15T17:41:31.675698"
        }
    ],
    "recovery_token": null,
    "new_device": null
}

@@ -0,0 +1,35 @@
{
    "tokens": [
        {
            "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
            "name": "primary_token",
            "date": "2022-07-15 17:41:31.675698"
        },
        {
            "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
            "name": "second_token",
            "date": "2022-07-15 17:41:31.675698Z"
        },
        {
            "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
            "name": "third_token",
            "date": "2022-07-15T17:41:31.675698Z"
        },
        {
            "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
            "name": "forth_token",
            "date": "2022-07-15T17:41:31.675698"
        }
    ],
    "recovery_token": {
        "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54",
        "date": "2022-11-11T11:48:54.228038",
        "expiration": null,
        "uses_left": 2
    },
    "new_device": {
        "token": "2237238de23dc71ab558e317bdb8ff8e",
        "date": "2022-10-26 20:50:47.973212",
        "expiration": "2022-10-26 21:00:47.974153"
    }
}

@@ -3,6 +3,7 @@
import pytest

from tests.common import read_json
from tests.test_graphql.common import assert_empty


class ProcessMock:
@@ -70,8 +71,7 @@ def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_popen):
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)


def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_popen):
@@ -227,8 +227,7 @@ def test_graphql_remove_ssh_key_unauthorized(client, some_users, mock_subprocess_popen):
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)


def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_popen):

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -67,5 +59,18 @@
            "username": "user3",
            "hashedPassword": "HASHED_PASSWORD_3"
        }
    ]
}
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -5,6 +5,7 @@ import os
import pytest

from tests.common import generate_system_query, read_json
from tests.test_graphql.common import assert_empty


@pytest.fixture
@@ -144,8 +145,7 @@ def test_graphql_get_python_version_wrong_auth(
            "query": generate_system_query([API_PYTHON_VERSION_INFO]),
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)


def test_graphql_get_python_version(authorized_client, mock_subprocess_check_output):
@@ -181,8 +181,7 @@ def test_graphql_get_system_version_unauthorized(
        },
    )

    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)

    assert mock_subprocess_check_output.call_count == 0
@@ -348,8 +347,7 @@ def test_graphql_get_timezone_unauthorized(client, turned_on):
            "query": generate_system_query([API_GET_TIMEZONE]),
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)


def test_graphql_get_timezone(authorized_client, turned_on):
@@ -403,8 +401,7 @@ def test_graphql_change_timezone_unauthorized(client, turned_on):
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)


def test_graphql_change_timezone(authorized_client, turned_on):
@@ -507,8 +504,7 @@ def test_graphql_get_auto_upgrade_unauthorized(client, turned_on):
            "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]),
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)


def test_graphql_get_auto_upgrade(authorized_client, turned_on):
@@ -614,8 +610,7 @@ def test_graphql_change_auto_upgrade_unauthorized(client, turned_on):
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)


def test_graphql_change_auto_upgrade(authorized_client, turned_on):
@@ -843,8 +838,7 @@ def test_graphql_pull_system_configuration_unauthorized(client, mock_subprocess_popen):
        },
    )

    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)
    assert mock_subprocess_popen.call_count == 0

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": true
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -46,5 +38,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
}
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": true
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
}
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": true
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -51,5 +43,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": true
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -43,5 +35,18 @@
    },
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
}
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -6,6 +6,7 @@ from tests.common import (
    generate_users_query,
    read_json,
)
from tests.test_graphql.common import assert_empty

invalid_usernames = [
    "messagebus",
@@ -125,8 +126,7 @@ def test_graphql_get_users_unauthorized(client, some_users, mock_subprocess_popen):
            "query": generate_users_query([API_USERS_INFO]),
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)


def test_graphql_get_some_users(authorized_client, some_users, mock_subprocess_popen):
@@ -192,8 +192,7 @@ def test_graphql_get_one_user_unauthorized(client, one_user, mock_subprocess_popen):
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)


def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen):
@@ -321,8 +320,7 @@ def test_graphql_add_user_unauthorize(client, one_user, mock_subprocess_popen):
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)


def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen):
@@ -516,7 +514,6 @@ def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_popen):
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is not None

    assert response.json()["data"]["createUser"]["message"] is not None
@@ -571,8 +568,7 @@ def test_graphql_delete_user_unauthorized(client, some_users, mock_subprocess_popen):
            "variables": {"username": "user1"},
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)


def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_popen):
@@ -676,8 +672,7 @@ def test_graphql_update_user_unauthorized(client, some_users, mock_subprocess_popen):
            },
        },
    )
    assert response.status_code == 200
    assert response.json().get("data") is None
    assert_empty(response)


def test_graphql_update_user(authorized_client, some_users, mock_subprocess_popen):

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -50,5 +42,18 @@
        "ssh-rsa KEY test@pc"
    ],
    "users": [
    ]
}
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -57,5 +49,18 @@
                "ssh-rsa KEY user1@pc"
            ]
        }
    ]
}
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -67,5 +59,18 @@
            "username": "user3",
            "hashedPassword": "HASHED_PASSWORD_3"
        }
    ]
}
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
}
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,16 +1,96 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
import json
import pytest

from selfprivacy_api.utils import WriteUserData, ReadUserData
from selfprivacy_api.jobs import Jobs, JobStatus
import selfprivacy_api.jobs as jobsmodule


def test_jobs(authorized_client, jobs_file, shared_datadir):
    jobs = Jobs()
def test_add_reset(jobs_with_one_job):
    jobs_with_one_job.reset()
    assert jobs_with_one_job.get_jobs() == []


def test_minimal_update(jobs_with_one_job):
    jobs = jobs_with_one_job
    test_job = jobs_with_one_job.get_jobs()[0]

    jobs.update(job=test_job, status=JobStatus.ERROR)

    assert jobs.get_jobs() == [test_job]


def test_remove_by_uid(jobs_with_one_job):
    test_job = jobs_with_one_job.get_jobs()[0]
    uid_str = str(test_job.uid)

    assert jobs_with_one_job.remove_by_uid(uid_str)
    assert jobs_with_one_job.get_jobs() == []
    assert not jobs_with_one_job.remove_by_uid(uid_str)


def test_remove_update_nonexistent(jobs_with_one_job):
    test_job = jobs_with_one_job.get_jobs()[0]

    jobs_with_one_job.remove(test_job)
    assert jobs_with_one_job.get_jobs() == []

    result = jobs_with_one_job.update(job=test_job, status=JobStatus.ERROR)
    assert result == test_job  # even though we might consider changing this behavior


def test_remove_get_nonexistent(jobs_with_one_job):
    test_job = jobs_with_one_job.get_jobs()[0]
    uid_str = str(test_job.uid)
    assert jobs_with_one_job.get_job(uid_str) == test_job

    jobs_with_one_job.remove(test_job)

    assert jobs_with_one_job.get_job(uid_str) is None


def test_jobs(jobs_with_one_job):
    jobs = jobs_with_one_job
    test_job = jobs_with_one_job.get_jobs()[0]
    assert not jobs.is_busy()

    jobs.update(
        job=test_job,
        name="Write Tests",
        description="An oddly satisfying experience",
        status=JobStatus.RUNNING,
        status_text="Status text",
        progress=50,
    )

    assert jobs.get_jobs() == [test_job]
    assert jobs.is_busy()

    backup = jobsmodule.JOB_EXPIRATION_SECONDS
    jobsmodule.JOB_EXPIRATION_SECONDS = 0

    jobs.update(
        job=test_job,
        status=JobStatus.FINISHED,
        status_text="Yaaay!",
        progress=100,
    )

    assert jobs.get_jobs() == []
    jobsmodule.JOB_EXPIRATION_SECONDS = backup
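    # Note (illustrative, not part of the original diff): the manual
    # save/restore of JOB_EXPIRATION_SECONDS above could also be done with
    # pytest's built-in monkeypatch fixture, e.g.
    #     monkeypatch.setattr(jobsmodule, "JOB_EXPIRATION_SECONDS", 0)
    # which restores the original value automatically even if the test fails.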


@pytest.fixture
def jobs():
    j = Jobs()
    j.reset()
    assert j.get_jobs() == []
    yield j
    j.reset()


@pytest.fixture
def jobs_with_one_job(jobs):
    test_job = jobs.add(
        type_id="test",
        name="Test job",
@@ -19,32 +99,5 @@ def test_jobs(authorized_client, jobs_file, shared_datadir):
        status_text="Status text",
        progress=0,
    )

    assert jobs.get_jobs() == [test_job]

    jobs.update(
        job=test_job,
        status=JobStatus.RUNNING,
        status_text="Status text",
        progress=50,
    )

    assert jobs.get_jobs() == [test_job]


@pytest.fixture
def mock_subprocess_run(mocker):
    mock = mocker.patch("subprocess.run", autospec=True)
    return mock


@pytest.fixture
def mock_shutil_move(mocker):
    mock = mocker.patch("shutil.move", autospec=True)
    return mock


@pytest.fixture
def mock_shutil_chown(mocker):
    mock = mocker.patch("shutil.chown", autospec=True)
    return mock
    return jobs

@@ -0,0 +1,18 @@
import pytest
from datetime import datetime, timedelta

from selfprivacy_api.models.tokens.recovery_key import RecoveryKey
from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey


def test_recovery_key_expired():
    expiration = datetime.now() - timedelta(minutes=5)
    key = RecoveryKey.generate(expiration=expiration, uses_left=2)
    assert not key.is_valid()


def test_new_device_key_expired():
    expiration = datetime.now() - timedelta(minutes=5)
    key = NewDeviceKey.generate()
    key.expires_at = expiration
    assert not key.is_valid()
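
# Both assertions above rely on is_valid() treating a past expires_at as
# invalid. A sketch of the assumed semantics, inferred from these tests
# rather than quoted from the models themselves:
#
#     def is_valid(self) -> bool:
#         if self.expires_at is not None and self.expires_at < datetime.now():
#             return False
#         if self.uses_left is not None and self.uses_left <= 0:
#             return False
#         return True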

@@ -1,14 +0,0 @@
{
    "tokens": [
        {
            "token": "TEST_TOKEN",
            "name": "test_token",
            "date": "2022-01-14 08:31:10.789314"
        },
        {
            "token": "TEST_TOKEN2",
            "name": "test_token2",
            "date": "2022-01-14 08:31:10.789314"
        }
    ]
}

@@ -1,9 +0,0 @@
{
    "tokens": [
        {
            "token": "TEST_TOKEN",
            "name": "Test Token",
            "date": "2022-01-14 08:31:10.789314"
        }
    ]
}

@@ -1,18 +1,10 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
    },
    "bitwarden": {
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -47,5 +39,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": true
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,16 +1,8 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -45,5 +37,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -47,5 +39,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -45,5 +37,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -47,5 +39,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,16 +1,8 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -40,5 +32,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -47,5 +39,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -45,5 +37,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -47,5 +39,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -48,5 +40,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -1,9 +1,4 @@
{
    "backblaze": {
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    },
    "api": {
        "token": "TEST_TOKEN",
        "enableSwagger": false
@@ -11,9 +6,6 @@
    "bitwarden": {
        "enable": false
    },
    "cloudflare": {
        "apiKey": "TOKEN"
    },
    "databasePassword": "PASSWORD",
    "domain": "test.tld",
    "hashedMasterPassword": "HASHED_PASSWORD",
@@ -45,5 +37,18 @@
    "timezone": "Europe/Moscow",
    "sshKeys": [
        "ssh-rsa KEY test@pc"
    ]
    ],
    "dns": {
        "provider": "CLOUDFLARE",
        "apiKey": "TOKEN"
    },
    "server": {
        "provider": "HETZNER"
    },
    "backup": {
        "provider": "BACKBLAZE",
        "accountId": "ID",
        "accountKey": "KEY",
        "bucket": "selfprivacy"
    }
}

@@ -161,7 +161,7 @@ def mock_restic_tasks(mocker):
@pytest.fixture
def undefined_settings(mocker, datadir):
    mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json")
    assert "backblaze" not in read_json(datadir / "undefined.json")
    assert "backup" not in read_json(datadir / "undefined.json")
    return datadir


@@ -170,20 +170,22 @@ def some_settings(mocker, datadir):
    mocker.patch(
        "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "some_values.json"
    )
    assert "backblaze" in read_json(datadir / "some_values.json")
    assert read_json(datadir / "some_values.json")["backblaze"]["accountId"] == "ID"
    assert read_json(datadir / "some_values.json")["backblaze"]["accountKey"] == "KEY"
    assert read_json(datadir / "some_values.json")["backblaze"]["bucket"] == "BUCKET"
    assert "backup" in read_json(datadir / "some_values.json")
    assert read_json(datadir / "some_values.json")["backup"]["provider"] == "BACKBLAZE"
    assert read_json(datadir / "some_values.json")["backup"]["accountId"] == "ID"
    assert read_json(datadir / "some_values.json")["backup"]["accountKey"] == "KEY"
    assert read_json(datadir / "some_values.json")["backup"]["bucket"] == "BUCKET"
    return datadir


@pytest.fixture
def no_values(mocker, datadir):
    mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_values.json")
    assert "backblaze" in read_json(datadir / "no_values.json")
    assert "accountId" not in read_json(datadir / "no_values.json")["backblaze"]
    assert "accountKey" not in read_json(datadir / "no_values.json")["backblaze"]
    assert "bucket" not in read_json(datadir / "no_values.json")["backblaze"]
    assert "backup" in read_json(datadir / "no_values.json")
    assert "provider" not in read_json(datadir / "no_values.json")["backup"]
    assert "accountId" not in read_json(datadir / "no_values.json")["backup"]
    assert "accountKey" not in read_json(datadir / "no_values.json")["backup"]
    assert "bucket" not in read_json(datadir / "no_values.json")["backup"]
    return datadir


@@ -462,7 +464,8 @@ def test_set_backblaze_config(
    )
    assert response.status_code == 200
    assert mock_restic_tasks.update_keys_from_userdata.call_count == 1
    assert read_json(some_settings / "some_values.json")["backblaze"] == {
    assert read_json(some_settings / "some_values.json")["backup"] == {
        "provider": "BACKBLAZE",
        "accountId": "123",
        "accountKey": "456",
        "bucket": "789",
@@ -478,7 +481,8 @@ def test_set_backblaze_config_on_undefined(
    )
    assert response.status_code == 200
    assert mock_restic_tasks.update_keys_from_userdata.call_count == 1
    assert read_json(undefined_settings / "undefined.json")["backblaze"] == {
    assert read_json(undefined_settings / "undefined.json")["backup"] == {
        "provider": "BACKBLAZE",
        "accountId": "123",
        "accountKey": "456",
        "bucket": "789",
@@ -494,7 +498,8 @@ def test_set_backblaze_config_on_no_values(
    )
    assert response.status_code == 200
    assert mock_restic_tasks.update_keys_from_userdata.call_count == 1
    assert read_json(no_values / "no_values.json")["backblaze"] == {
    assert read_json(no_values / "no_values.json")["backup"] == {
        "provider": "BACKBLAZE",
        "accountId": "123",
        "accountKey": "456",
        "bucket": "789",