From e32b95679b300abd01e7543c4db5021ad440a088 Mon Sep 17 00:00:00 2001 From: inexcode Date: Tue, 25 Oct 2022 23:59:39 +0300 Subject: [PATCH 01/12] The starting point for Def --- selfprivacy_api/migrations/__init__.py | 4 +- selfprivacy_api/models/__init__.py | 0 .../models/tokens/new_device_key.py | 47 ++++++++ selfprivacy_api/models/tokens/recovery_key.py | 56 ++++++++++ selfprivacy_api/models/tokens/token.py | 34 ++++++ selfprivacy_api/repositories/__init__.py | 0 .../repositories/tokens/__init__.py | 8 ++ .../tokens/abstract_tokens_repository.py | 101 ++++++++++++++++++ .../tokens/json_tokens_repository.py | 7 ++ .../tokens/redis_tokens_repository.py | 15 +++ 10 files changed, 271 insertions(+), 1 deletion(-) create mode 100644 selfprivacy_api/models/__init__.py create mode 100644 selfprivacy_api/models/tokens/new_device_key.py create mode 100644 selfprivacy_api/models/tokens/recovery_key.py create mode 100644 selfprivacy_api/models/tokens/token.py create mode 100644 selfprivacy_api/repositories/__init__.py create mode 100644 selfprivacy_api/repositories/tokens/__init__.py create mode 100644 selfprivacy_api/repositories/tokens/abstract_tokens_repository.py create mode 100644 selfprivacy_api/repositories/tokens/json_tokens_repository.py create mode 100644 selfprivacy_api/repositories/tokens/redis_tokens_repository.py diff --git a/selfprivacy_api/migrations/__init__.py b/selfprivacy_api/migrations/__init__.py index 8209198..b051f04 100644 --- a/selfprivacy_api/migrations/__init__.py +++ b/selfprivacy_api/migrations/__init__.py @@ -8,7 +8,9 @@ at api.skippedMigrations in userdata.json and populating it with IDs of the migrations to skip. Adding DISABLE_ALL to that array disables the migrations module entirely. """ -from selfprivacy_api.migrations.check_for_failed_binds_migration import CheckForFailedBindsMigration +from selfprivacy_api.migrations.check_for_failed_binds_migration import ( + CheckForFailedBindsMigration, +) from selfprivacy_api.utils import ReadUserData from selfprivacy_api.migrations.fix_nixos_config_branch import FixNixosConfigBranch from selfprivacy_api.migrations.create_tokens_json import CreateTokensJson diff --git a/selfprivacy_api/models/__init__.py b/selfprivacy_api/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/models/tokens/new_device_key.py b/selfprivacy_api/models/tokens/new_device_key.py new file mode 100644 index 0000000..08941b7 --- /dev/null +++ b/selfprivacy_api/models/tokens/new_device_key.py @@ -0,0 +1,47 @@ +""" +New device key used to obtain access token. +""" +from datetime import datetime, timedelta +import secrets +from typing import Optional +from pydantic import BaseModel +from mnemonic import Mnemonic + + +class NewDeviceKey(BaseModel): + """ + Recovery key used to obtain access token. + + Recovery key has a key string, date of creation, date of expiration. + """ + + key: str + created_at: datetime + expires_at: Optional[datetime] + + def is_valid(self) -> bool: + """ + Check if the recovery key is valid. + """ + if self.expires_at is not None and self.expires_at < datetime.now(): + return False + return True + + def as_mnemonic(self) -> str: + """ + Get the recovery key as a mnemonic. + """ + return Mnemonic(language="english").to_mnemonic(bytes.fromhex(self.key)) + + @staticmethod + def generate() -> "NewDeviceKey": + """ + Factory to generate a random token. 
+ """ + creation_date = datetime.now() + key = secrets.token_bytes(16).hex() + return NewDeviceKey( + key=key, + created_at=creation_date, + expires_at=datetime.now() + timedelta(minutes=10), + ) diff --git a/selfprivacy_api/models/tokens/recovery_key.py b/selfprivacy_api/models/tokens/recovery_key.py new file mode 100644 index 0000000..098aceb --- /dev/null +++ b/selfprivacy_api/models/tokens/recovery_key.py @@ -0,0 +1,56 @@ +""" +Recovery key used to obtain access token. + +Recovery key has a token string, date of creation, optional date of expiration and optional count of uses left. +""" +from datetime import datetime +import secrets +from typing import Optional +from pydantic import BaseModel +from mnemonic import Mnemonic + + +class RecoveryKey(BaseModel): + """ + Recovery key used to obtain access token. + + Recovery key has a key string, date of creation, optional date of expiration and optional count of uses left. + """ + + key: str + created_at: datetime + expires_at: Optional[datetime] + uses_left: Optional[int] + + def is_valid(self) -> bool: + """ + Check if the recovery key is valid. + """ + if self.expires_at is not None and self.expires_at < datetime.now(): + return False + if self.uses_left is not None and self.uses_left <= 0: + return False + return True + + def as_mnemonic(self) -> str: + """ + Get the recovery key as a mnemonic. + """ + return Mnemonic(language="english").to_mnemonic(bytes.fromhex(self.key)) + + @staticmethod + def generate( + expiration: Optional[datetime], + uses_left: Optional[int], + ) -> "RecoveryKey": + """ + Factory to generate a random token. + """ + creation_date = datetime.now() + key = secrets.token_bytes(24).hex() + return RecoveryKey( + key=key, + created_at=creation_date, + expires_at=expiration, + uses_left=uses_left, + ) diff --git a/selfprivacy_api/models/tokens/token.py b/selfprivacy_api/models/tokens/token.py new file mode 100644 index 0000000..4a5cd7f --- /dev/null +++ b/selfprivacy_api/models/tokens/token.py @@ -0,0 +1,34 @@ +""" +Model of the access token. + +Access token has a token string, device name and date of creation. +""" +from datetime import datetime +import secrets +from typing import Optional +from pydantic import BaseModel + + +class Token(BaseModel): + """ + Model of the access token. + + Access token has a token string, device name and date of creation. + """ + + token: str + device_name: str + created_at: datetime + + @staticmethod + def generate(name: str) -> "Token": + """ + Factory to generate a random token. 
+ """ + creation_date = datetime.now() + token = secrets.token_urlsafe(32) + return Token( + token=token, + device_name=name, + created_at=creation_date, + ) diff --git a/selfprivacy_api/repositories/__init__.py b/selfprivacy_api/repositories/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/selfprivacy_api/repositories/tokens/__init__.py b/selfprivacy_api/repositories/tokens/__init__.py new file mode 100644 index 0000000..9941bdc --- /dev/null +++ b/selfprivacy_api/repositories/tokens/__init__.py @@ -0,0 +1,8 @@ +from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( + AbstractTokensRepository, +) +from selfprivacy_api.repositories.tokens.json_tokens_repository import ( + JsonTokensRepository, +) + +repository = JsonTokensRepository() diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py new file mode 100644 index 0000000..1073ca7 --- /dev/null +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -0,0 +1,101 @@ +from abc import ABC, abstractmethod +from datetime import datetime +from typing import List, Optional + +from selfprivacy_api.models.tokens.token import Token +from selfprivacy_api.models.tokens.recovery_key import RecoveryKey +from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey + + +class AbstractTokensRepository(ABC): + @abstractmethod + def get_token_by_token_string(self, token_string: str) -> Optional[Token]: + """Get the token by token""" + ... + + @abstractmethod + def get_token_by_name(self, token_name: str) -> Optional[Token]: + """Get the token by name""" + ... + + @abstractmethod + def get_tokens(self) -> list[Token]: + """Get the tokens""" + ... + + @abstractmethod + def create_token(self, name: str) -> Token: + """Create new token""" + ... + + @abstractmethod + def delete_token(self, token: Token) -> None: + """Delete the token""" + ... + + @abstractmethod + def refresh_token(self, token: Token) -> Token: + """Refresh the token""" + ... + + def is_token_valid(self, token_string: str) -> bool: + """Check if the token is valid""" + token = self.get_token_by_token_string(token_string) + if token is None: + return False + return True + + def is_token_name_exists(self, token_name: str) -> bool: + """Check if the token name exists""" + token = self.get_token_by_name(token_name) + if token is None: + return False + return True + + def is_token_name_pair_valid(self, token_name: str, token_string: str) -> bool: + """Check if the token name and token are valid""" + token = self.get_token_by_name(token_name) + if token is None: + return False + return token.token == token_string + + @abstractmethod + def get_recovery_key(self) -> Optional[RecoveryKey]: + """Get the recovery key""" + ... + + @abstractmethod + def create_recovery_key( + self, + expiration: Optional[datetime], + uses_left: Optional[int], + ) -> RecoveryKey: + """Create the recovery key""" + ... + + @abstractmethod + def use_mnemonic_recovery_key(self, mnemonic_phrase: str, name: str) -> Token: + """Use the mnemonic recovery key and create a new token with the given name""" + ... + + def is_recovery_key_valid(self) -> bool: + """Check if the recovery key is valid""" + recovery_key = self.get_recovery_key() + if recovery_key is None: + return False + return recovery_key.is_valid() + + @abstractmethod + def get_new_device_key(self) -> NewDeviceKey: + """Creates and returns the new device key""" + ... 
+ + @abstractmethod + def delete_new_device_key(self) -> None: + """Delete the new device key""" + ... + + @abstractmethod + def use_mnemonic_new_device_key(self, mnemonic_phrase: str, name: str) -> None: + """Use the mnemonic new device key""" + ... diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py new file mode 100644 index 0000000..7302096 --- /dev/null +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -0,0 +1,7 @@ +from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( + AbstractTokensRepository, +) + + +class JsonTokensRepository(AbstractTokensRepository): + pass diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py new file mode 100644 index 0000000..0186c11 --- /dev/null +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -0,0 +1,15 @@ +""" +Token repository using Redis as backend. +""" +from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( + AbstractTokensRepository, +) + + +class RedisTokensRepository(AbstractTokensRepository): + """ + Token repository using Redis as a backend + """ + + def __init__(self) -> None: + raise NotImplementedError -- 2.42.0 From bf3d921e2d08b0db88ee9e2e4bc7d0aa3e29df8b Mon Sep 17 00:00:00 2001 From: def Date: Thu, 27 Oct 2022 18:16:22 +0400 Subject: [PATCH 02/12] refactor: implemented a json repository --- .../models/tokens/new_device_key.py | 5 +- selfprivacy_api/models/tokens/token.py | 5 +- .../tokens/abstract_tokens_repository.py | 18 +- .../repositories/tokens/exceptions.py | 2 + .../tokens/json_tokens_repository.py | 155 +++++++++++++++++- 5 files changed, 163 insertions(+), 22 deletions(-) create mode 100644 selfprivacy_api/repositories/tokens/exceptions.py diff --git a/selfprivacy_api/models/tokens/new_device_key.py b/selfprivacy_api/models/tokens/new_device_key.py index 08941b7..dda926c 100644 --- a/selfprivacy_api/models/tokens/new_device_key.py +++ b/selfprivacy_api/models/tokens/new_device_key.py @@ -3,7 +3,6 @@ New device key used to obtain access token. """ from datetime import datetime, timedelta import secrets -from typing import Optional from pydantic import BaseModel from mnemonic import Mnemonic @@ -17,13 +16,13 @@ class NewDeviceKey(BaseModel): key: str created_at: datetime - expires_at: Optional[datetime] + expires_at: datetime def is_valid(self) -> bool: """ Check if the recovery key is valid. """ - if self.expires_at is not None and self.expires_at < datetime.now(): + if self.expires_at < datetime.now(): return False return True diff --git a/selfprivacy_api/models/tokens/token.py b/selfprivacy_api/models/tokens/token.py index 4a5cd7f..4c34f58 100644 --- a/selfprivacy_api/models/tokens/token.py +++ b/selfprivacy_api/models/tokens/token.py @@ -5,7 +5,6 @@ Access token has a token string, device name and date of creation. """ from datetime import datetime import secrets -from typing import Optional from pydantic import BaseModel @@ -21,7 +20,7 @@ class Token(BaseModel): created_at: datetime @staticmethod - def generate(name: str) -> "Token": + def generate(device_name: str) -> "Token": """ Factory to generate a random token. 
""" @@ -29,6 +28,6 @@ class Token(BaseModel): token = secrets.token_urlsafe(32) return Token( token=token, - device_name=name, + device_name=device_name, created_at=creation_date, ) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 1073ca7..3aac3d7 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from datetime import datetime -from typing import List, Optional +from typing import Optional from selfprivacy_api.models.tokens.token import Token from selfprivacy_api.models.tokens.recovery_key import RecoveryKey @@ -11,32 +11,26 @@ class AbstractTokensRepository(ABC): @abstractmethod def get_token_by_token_string(self, token_string: str) -> Optional[Token]: """Get the token by token""" - ... @abstractmethod def get_token_by_name(self, token_name: str) -> Optional[Token]: """Get the token by name""" - ... @abstractmethod def get_tokens(self) -> list[Token]: """Get the tokens""" - ... @abstractmethod def create_token(self, name: str) -> Token: """Create new token""" - ... @abstractmethod - def delete_token(self, token: Token) -> None: + def delete_token(self, input_token: Token) -> None: """Delete the token""" - ... @abstractmethod - def refresh_token(self, token: Token) -> Token: + def refresh_token(self, input_token: Token) -> Token: """Refresh the token""" - ... def is_token_valid(self, token_string: str) -> bool: """Check if the token is valid""" @@ -62,7 +56,6 @@ class AbstractTokensRepository(ABC): @abstractmethod def get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" - ... @abstractmethod def create_recovery_key( @@ -71,12 +64,10 @@ class AbstractTokensRepository(ABC): uses_left: Optional[int], ) -> RecoveryKey: """Create the recovery key""" - ... @abstractmethod def use_mnemonic_recovery_key(self, mnemonic_phrase: str, name: str) -> Token: """Use the mnemonic recovery key and create a new token with the given name""" - ... def is_recovery_key_valid(self) -> bool: """Check if the recovery key is valid""" @@ -88,14 +79,11 @@ class AbstractTokensRepository(ABC): @abstractmethod def get_new_device_key(self) -> NewDeviceKey: """Creates and returns the new device key""" - ... @abstractmethod def delete_new_device_key(self) -> None: """Delete the new device key""" - ... @abstractmethod def use_mnemonic_new_device_key(self, mnemonic_phrase: str, name: str) -> None: """Use the mnemonic new device key""" - ... 
diff --git a/selfprivacy_api/repositories/tokens/exceptions.py b/selfprivacy_api/repositories/tokens/exceptions.py new file mode 100644 index 0000000..5f26d46 --- /dev/null +++ b/selfprivacy_api/repositories/tokens/exceptions.py @@ -0,0 +1,2 @@ +class TokenNotFoundError(Exception): + """Token not found!""" diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 7302096..90bc8ee 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -1,7 +1,160 @@ +""" +temporary legacy +""" +from secrets import token_bytes +from typing import Optional +from datetime import datetime + +from selfprivacy_api.utils import UserDataFiles, WriteUserData, ReadUserData +from selfprivacy_api.models.tokens.token import Token +from selfprivacy_api.models.tokens.recovery_key import RecoveryKey +from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey +from selfprivacy_api.repositories.tokens.exceptions import TokenNotFoundError from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, ) class JsonTokensRepository(AbstractTokensRepository): - pass + def get_token_by_token_string(self, token_string: str) -> Optional[Token]: + """Get the token by token""" + with ReadUserData(UserDataFiles.TOKENS) as tokens_file: + for userdata_token in tokens_file["tokens"]: + if userdata_token["token"] == token_string: + + return Token( + token=token_string, + device_name=userdata_token["name"], + created_at=userdata_token["date"], + ) + + raise TokenNotFoundError("Token not found!") + + def get_token_by_name(self, token_name: str) -> Optional[Token]: + """Get the token by name""" + with ReadUserData(UserDataFiles.TOKENS) as tokens_file: + for userdata_token in tokens_file["tokens"]: + if userdata_token["name"] == token_name: + + return Token( + token=userdata_token["token"], + device_name=token_name, + created_at=userdata_token["date"], + ) + + raise TokenNotFoundError("Token not found!") + + def get_tokens(self) -> list[Token]: + """Get the tokens""" + tokens_list = [] + + with ReadUserData(UserDataFiles.TOKENS) as tokens_file: + for userdata_token in tokens_file["tokens"]: + tokens_list.append( + Token( + token=userdata_token.token, + device_name=userdata_token.name, + created_at=userdata_token.date, + ) + ) + + return tokens_list + + def create_token(self, name: str) -> Token: + """Create new token""" + new_token = Token.generate(device_name=name) + + with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + tokens_file["tokens"].append( + { + "token": new_token.token, + "name": new_token.device_name, + "date": new_token.created_at, + } + ) + return new_token + + def delete_token(self, input_token: Token) -> None: + """Delete the token""" + with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + for userdata_token in tokens_file["tokens"]: + if userdata_token["token"] == input_token: + tokens_file["tokens"].remove( + userdata_token + ) # Allah, i pray it works + + def refresh_token(self, input_token: Token) -> Token: + """Change the token field of the existing token""" + new_token = Token.generate(device_name=input_token.device_name) + + with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + for userdata_token in tokens_file["tokens"]: + + if userdata_token["token"] == input_token.token: + userdata_token["token"] = new_token.token + userdata_token["data"] = new_token.created_at + + return 
new_token + + raise TokenNotFoundError("Token not found!") + + def get_recovery_key(self) -> Optional[RecoveryKey]: + """Get the recovery key""" + with ReadUserData(UserDataFiles.TOKENS) as tokens_file: + + if tokens_file["recovery_token"] is None: + return + + recovery_key = RecoveryKey( + key=tokens_file["recovery_token"]["token"], + created_at=tokens_file["recovery_token"]["date"], + expires_at=tokens_file["recovery_token"]["expitation"], + uses_left=tokens_file["recovery_token"]["uses_left"], + ) + + return recovery_key + + def create_recovery_key( + self, + expiration: Optional[datetime], + uses_left: Optional[int], + ) -> RecoveryKey: + """Create the recovery key""" + + recovery_key = RecoveryKey.generate(expiration=None, uses_left=None) + + with ReadUserData(UserDataFiles.TOKENS) as tokens_file: + tokens_file["recovery_key"] = { + "token": recovery_key.key, + "date": recovery_key.created_at, + "expiration": recovery_key.expires_at, + "uses_left": recovery_key.uses_left, + } + + return recovery_key + + def use_mnemonic_recovery_key(self, mnemonic_phrase: str, name: str) -> Token: + """Use the mnemonic recovery key and create a new token with the given name""" + ... + + def get_new_device_key(self) -> NewDeviceKey: + """Creates and returns the new device key""" + new_device_key = NewDeviceKey.generate() + + with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + tokens_file["new_device"] = { + "token": new_device_key.key, + "data": new_device_key.created_at, + "expiration": new_device_key.expires_at, + } + + return new_device_key + + def delete_new_device_key(self) -> None: + """Delete the new device key""" + with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + tokens_file.pop("new_device") + + def use_mnemonic_new_device_key(self, mnemonic_phrase: str, name: str) -> None: + """Use the mnemonic new device key""" + ... 
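
This patch still leaves several repository methods as stubs, but together with the abstract class it already fixes the shape of the token lifecycle. A rough usage sketch of that intended flow follows; it is illustrative only, assumes a configured selfprivacy_api environment where the tokens file behind UserDataFiles.TOKENS already exists, and uses the parameter names as they stand in this patch (some are renamed later in the series).

    from selfprivacy_api.repositories.tokens.json_tokens_repository import (
        JsonTokensRepository,
    )

    repo = JsonTokensRepository()

    # Issue an access token for a device and validate it the way request
    # handlers are expected to.
    token = repo.create_token(name="laptop")
    assert repo.is_token_valid(token.token)
    assert repo.is_token_name_pair_valid("laptop", token.token)

    # Rotate the token string while keeping the device entry.
    token = repo.refresh_token(token)

    # Create a single-use recovery key and show it to the user as a mnemonic;
    # redeeming it (use_mnemonic_recovery_key) is still a stub at this point.
    recovery_key = repo.create_recovery_key(expiration=None, uses_left=1)
    print(recovery_key.as_mnemonic())

The remaining patches in the series mostly fill in these stubs and tighten the error handling around exactly this flow.
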
-- 2.42.0 From 4a09d360ac984e33042944a2afb271513e393a75 Mon Sep 17 00:00:00 2001 From: def Date: Sat, 29 Oct 2022 04:50:13 +0400 Subject: [PATCH 03/12] i dont know how it works --- .../tokens/json_tokens_repository.py | 53 ++++++++++++++++--- 1 file changed, 45 insertions(+), 8 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 90bc8ee..3ed3c1f 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -1,7 +1,6 @@ """ temporary legacy """ -from secrets import token_bytes from typing import Optional from datetime import datetime @@ -13,6 +12,7 @@ from selfprivacy_api.repositories.tokens.exceptions import TokenNotFoundError from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, ) +from mnemonic import Mnemonic class JsonTokensRepository(AbstractTokensRepository): @@ -106,10 +106,10 @@ class JsonTokensRepository(AbstractTokensRepository): return recovery_key = RecoveryKey( - key=tokens_file["recovery_token"]["token"], - created_at=tokens_file["recovery_token"]["date"], - expires_at=tokens_file["recovery_token"]["expitation"], - uses_left=tokens_file["recovery_token"]["uses_left"], + key=tokens_file["recovery_token"].get("token"), + created_at=tokens_file["recovery_token"].get("date"), + expires_at=tokens_file["recovery_token"].get("expitation"), + uses_left=tokens_file["recovery_token"].get("uses_left"), ) return recovery_key @@ -121,7 +121,7 @@ class JsonTokensRepository(AbstractTokensRepository): ) -> RecoveryKey: """Create the recovery key""" - recovery_key = RecoveryKey.generate(expiration=None, uses_left=None) + recovery_key = RecoveryKey.generate(expiration=expiration, uses_left=uses_left) with ReadUserData(UserDataFiles.TOKENS) as tokens_file: tokens_file["recovery_key"] = { @@ -135,7 +135,43 @@ class JsonTokensRepository(AbstractTokensRepository): def use_mnemonic_recovery_key(self, mnemonic_phrase: str, name: str) -> Token: """Use the mnemonic recovery key and create a new token with the given name""" - ... + recovery_key = self.get_recovery_key() # self ? + + if recovery_key is None: + return None + + if not recovery_key.is_valid(): + return None + + if recovery_key is None: + return None + + recovery_token = bytes.fromhex(recovery_key.key) + + if not Mnemonic(language="english").check(mnemonic_phrase): + return None + + phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) + if phrase_bytes != recovery_token: + return None + + new_recovery_key = RecoveryKey.generate() + + with WriteUserData(UserDataFiles.TOKENS) as tokens: + tokens["tokens"].append( + { + "token": new_recovery_key.key, + # "name": new_recovery_key.name, what???? 
there is no name + "date": str(datetime.now()), + } + ) + if "recovery_token" in tokens: + if ( + "uses_left" in tokens["recovery_token"] + and tokens["recovery_token"]["uses_left"] is not None + ): + tokens["recovery_token"]["uses_left"] -= 1 + return new_recovery_key def get_new_device_key(self) -> NewDeviceKey: """Creates and returns the new device key""" @@ -153,7 +189,8 @@ class JsonTokensRepository(AbstractTokensRepository): def delete_new_device_key(self) -> None: """Delete the new device key""" with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - tokens_file.pop("new_device") + if "new_device" in tokens_file: + del tokens_file["new_device"] def use_mnemonic_new_device_key(self, mnemonic_phrase: str, name: str) -> None: """Use the mnemonic new device key""" -- 2.42.0 From 80a3750d92aa166b4c583729ff5e690be1c65d46 Mon Sep 17 00:00:00 2001 From: def Date: Thu, 3 Nov 2022 05:43:15 +0400 Subject: [PATCH 04/12] refactor: fix rp issues - add raise errors - fix names confusion - add use_mnemonic_new_device_key() --- .../tokens/abstract_tokens_repository.py | 8 ++- .../repositories/tokens/exceptions.py | 16 +++++ .../tokens/json_tokens_repository.py | 62 +++++++++++++------ 3 files changed, 66 insertions(+), 20 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 3aac3d7..d4ad05a 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -66,7 +66,9 @@ class AbstractTokensRepository(ABC): """Create the recovery key""" @abstractmethod - def use_mnemonic_recovery_key(self, mnemonic_phrase: str, name: str) -> Token: + def use_mnemonic_recovery_key( + self, mnemonic_phrase: str, device_name: str + ) -> Token: """Use the mnemonic recovery key and create a new token with the given name""" def is_recovery_key_valid(self) -> bool: @@ -85,5 +87,7 @@ class AbstractTokensRepository(ABC): """Delete the new device key""" @abstractmethod - def use_mnemonic_new_device_key(self, mnemonic_phrase: str, name: str) -> None: + def use_mnemonic_new_device_key( + self, mnemonic_phrase: str, device_name: str + ) -> Token: """Use the mnemonic new device key""" diff --git a/selfprivacy_api/repositories/tokens/exceptions.py b/selfprivacy_api/repositories/tokens/exceptions.py index 5f26d46..d5049b4 100644 --- a/selfprivacy_api/repositories/tokens/exceptions.py +++ b/selfprivacy_api/repositories/tokens/exceptions.py @@ -1,2 +1,18 @@ class TokenNotFoundError(Exception): """Token not found!""" + + +class RecoveryKeyNotFoundError(Exception): + """Recovery key not found!""" + + +class MnemonicError(Exception): + """Phrase is not mnemonic!""" + + +class RecoveryKeyIsNotValidError(Exception): + """Recovery key is not valid!""" + + +class RecoveryTokenError(Exception): + """Error ???""" diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 3ed3c1f..a6b6ba8 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -8,7 +8,13 @@ from selfprivacy_api.utils import UserDataFiles, WriteUserData, ReadUserData from selfprivacy_api.models.tokens.token import Token from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey -from selfprivacy_api.repositories.tokens.exceptions 
import TokenNotFoundError +from selfprivacy_api.repositories.tokens.exceptions import ( + TokenNotFoundError, + RecoveryKeyNotFoundError, + MnemonicError, + RecoveryKeyIsNotValidError, + RecoveryTokenError, +) from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, ) @@ -81,7 +87,7 @@ class JsonTokensRepository(AbstractTokensRepository): if userdata_token["token"] == input_token: tokens_file["tokens"].remove( userdata_token - ) # Allah, i pray it works + ) # Naiji, i pray it works def refresh_token(self, input_token: Token) -> Token: """Change the token field of the existing token""" @@ -133,45 +139,45 @@ class JsonTokensRepository(AbstractTokensRepository): return recovery_key - def use_mnemonic_recovery_key(self, mnemonic_phrase: str, name: str) -> Token: + def use_mnemonic_recovery_key( + self, mnemonic_phrase: str, device_name: str + ) -> Token: """Use the mnemonic recovery key and create a new token with the given name""" - recovery_key = self.get_recovery_key() # self ? + recovery_key = self.get_recovery_key() if recovery_key is None: - return None + raise RecoveryKeyNotFoundError("Recovery key is None!") if not recovery_key.is_valid(): - return None - - if recovery_key is None: - return None + raise RecoveryKeyIsNotValidError("Recovery key is not valid!") recovery_token = bytes.fromhex(recovery_key.key) if not Mnemonic(language="english").check(mnemonic_phrase): - return None + raise MnemonicError("Phrase is not mnemonic!") phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) if phrase_bytes != recovery_token: - return None + raise RecoveryTokenError("Phrase is not (?) recovery token") - new_recovery_key = RecoveryKey.generate() + new_token = Token.generate(device_name=device_name) with WriteUserData(UserDataFiles.TOKENS) as tokens: tokens["tokens"].append( { - "token": new_recovery_key.key, - # "name": new_recovery_key.name, what???? there is no name - "date": str(datetime.now()), + "token": new_token.token, + "name": new_token.device_name, + "date": new_token.created_at, } ) + if "recovery_token" in tokens: if ( "uses_left" in tokens["recovery_token"] and tokens["recovery_token"]["uses_left"] is not None ): tokens["recovery_token"]["uses_left"] -= 1 - return new_recovery_key + return new_token def get_new_device_key(self) -> NewDeviceKey: """Creates and returns the new device key""" @@ -192,6 +198,26 @@ class JsonTokensRepository(AbstractTokensRepository): if "new_device" in tokens_file: del tokens_file["new_device"] - def use_mnemonic_new_device_key(self, mnemonic_phrase: str, name: str) -> None: + def use_mnemonic_new_device_key( + self, mnemonic_phrase: str, device_name: str + ) -> Token: """Use the mnemonic new device key""" - ... 
+ new_device_key = NewDeviceKey.generate() + + if new_device_key.key is None: + raise TokenNotFoundError("Device key is None!") + + token = bytes.fromhex(new_device_key.key) + if not Mnemonic(language="english").check(mnemonic_phrase): + raise MnemonicError("Phrase is not mnemonic!") + + phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) + if phrase_bytes != token: + raise MnemonicError("Phrase is not token!") + + new_token = Token.generate(device_name=device_name) + with WriteUserData(UserDataFiles.TOKENS) as tokens: + if "new_device" in tokens: + del tokens["new_device"] + + return new_token -- 2.42.0 From 106a63e28c271b992081a3d1183f5ca789ff4fa5 Mon Sep 17 00:00:00 2001 From: def Date: Mon, 7 Nov 2022 18:05:25 +0400 Subject: [PATCH 05/12] refactor, test: add some token repo tests, fix json tokens repo --- .../tokens/abstract_tokens_repository.py | 2 +- .../tokens/json_tokens_repository.py | 10 +-- .../test_repository/test_tokens_repository.py | 81 +++++++++++++++++++ .../test_tokens_repository/tokens.json | 20 +++++ 4 files changed, 107 insertions(+), 6 deletions(-) create mode 100644 tests/test_graphql/test_repository/test_tokens_repository.py create mode 100644 tests/test_graphql/test_repository/test_tokens_repository/tokens.json diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index d4ad05a..3cf6e1d 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -21,7 +21,7 @@ class AbstractTokensRepository(ABC): """Get the tokens""" @abstractmethod - def create_token(self, name: str) -> Token: + def create_token(self, device_name: str) -> Token: """Create new token""" @abstractmethod diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index a6b6ba8..e23ed0b 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -58,17 +58,17 @@ class JsonTokensRepository(AbstractTokensRepository): for userdata_token in tokens_file["tokens"]: tokens_list.append( Token( - token=userdata_token.token, - device_name=userdata_token.name, - created_at=userdata_token.date, + token=userdata_token["token"], + device_name=userdata_token["name"], + created_at=userdata_token["date"], ) ) return tokens_list - def create_token(self, name: str) -> Token: + def create_token(self, device_name: str) -> Token: """Create new token""" - new_token = Token.generate(device_name=name) + new_token = Token.generate(device_name) with WriteUserData(UserDataFiles.TOKENS) as tokens_file: tokens_file["tokens"].append( diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py new file mode 100644 index 0000000..ca8bc24 --- /dev/null +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -0,0 +1,81 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=missing-function-docstring + +import datetime +import pytest + +from tests.common import read_json +from selfprivacy_api.repositories.tokens.json_tokens_repository import ( + JsonTokensRepository, +) +from selfprivacy_api.models.tokens.token import Token +from selfprivacy_api.repositories.tokens.exceptions import ( + TokenNotFoundError, + RecoveryKeyNotFoundError, + 
MnemonicError, + RecoveryKeyIsNotValidError, + RecoveryTokenError, +) + + +@pytest.fixture +def tokens(mocker, datadir): + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json") + assert read_json(datadir / "tokens.json")["tokens"] == [ + { + "token": "iamtoken", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698", + } + ] + return datadir + + +def test_get_token_by_token_string(tokens): + repo = JsonTokensRepository() + + assert repo.get_token_by_token_string(token_string="iamtoken") is not None + assert repo.get_token_by_token_string(token_string="iamtoken") == Token( + token="iamtoken", + device_name="primary_token", + created_at=datetime.datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + +def test_get_token_by_non_existent_token_string(tokens): + repo = JsonTokensRepository() + + with pytest.raises(TokenNotFoundError): + assert repo.get_token_by_token_string(token_string="iamBadtoken") is None + + +def test_get_token_by_name(tokens): + repo = JsonTokensRepository() + + assert repo.get_token_by_name(token_name="primary_token") is not None + assert repo.get_token_by_name(token_name="primary_token") == Token( + token="iamtoken", + device_name="primary_token", + created_at=datetime.datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + +def test_get_token_by_non_existent_name(tokens): + repo = JsonTokensRepository() + + with pytest.raises(TokenNotFoundError): + assert repo.get_token_by_name(token_name="badname") is None + + +def test_get_tokens(tokens): + repo = JsonTokensRepository() + + assert repo.get_tokens() is not None + assert repo.get_tokens() == [ + Token( + token="iamtoken", + device_name="primary_token", + created_at=datetime.datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + ] diff --git a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json new file mode 100644 index 0000000..b8eaffd --- /dev/null +++ b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json @@ -0,0 +1,20 @@ +{ + "tokens": [ + { + "token": "iamtoken", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + } + ], + "recovery_token": { + "token": "iamtoken", + "date": "2022-09-24T17:54:07.607272", + "expiration": null, + "uses_left": null + }, + "new_device": { + "token": "iamtoken", + "date": "2022-10-26 20:50:47.973212", + "expiration": "2022-10-26 21:00:47.974153" + } +} \ No newline at end of file -- 2.42.0 From 56803a8bd873e5cfcf62e6ee3bf57e472c6eab45 Mon Sep 17 00:00:00 2001 From: def Date: Thu, 10 Nov 2022 03:41:54 +0400 Subject: [PATCH 06/12] tmp commit for inex --- .../tokens/json_tokens_repository.py | 21 +- .../test_repository/test_tokens_repository.py | 219 +++++++++++++++++- .../test_tokens_repository/tokens.json | 2 +- tests/test_graphql/test_users.py | 1 - 4 files changed, 227 insertions(+), 16 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index e23ed0b..938d419 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -75,7 +75,7 @@ class JsonTokensRepository(AbstractTokensRepository): { "token": new_token.token, "name": new_token.device_name, - "date": new_token.created_at, + "date": new_token.created_at.strftime("%Y-%m-%d %H:%M:%S.%f"), } ) return new_token @@ -84,10 +84,11 @@ class JsonTokensRepository(AbstractTokensRepository): """Delete the token""" with 
WriteUserData(UserDataFiles.TOKENS) as tokens_file: for userdata_token in tokens_file["tokens"]: - if userdata_token["token"] == input_token: - tokens_file["tokens"].remove( - userdata_token - ) # Naiji, i pray it works + if userdata_token["token"] == input_token.token: + tokens_file["tokens"].remove(userdata_token) + return + + raise TokenNotFoundError("Token not found!") def refresh_token(self, input_token: Token) -> Token: """Change the token field of the existing token""" @@ -96,7 +97,7 @@ class JsonTokensRepository(AbstractTokensRepository): with WriteUserData(UserDataFiles.TOKENS) as tokens_file: for userdata_token in tokens_file["tokens"]: - if userdata_token["token"] == input_token.token: + if userdata_token["name"] == input_token.device_name: userdata_token["token"] = new_token.token userdata_token["data"] = new_token.created_at @@ -197,17 +198,17 @@ class JsonTokensRepository(AbstractTokensRepository): with WriteUserData(UserDataFiles.TOKENS) as tokens_file: if "new_device" in tokens_file: del tokens_file["new_device"] + return + + raise TokenNotFoundError("Key not found!") def use_mnemonic_new_device_key( self, mnemonic_phrase: str, device_name: str ) -> Token: """Use the mnemonic new device key""" new_device_key = NewDeviceKey.generate() - - if new_device_key.key is None: - raise TokenNotFoundError("Device key is None!") - token = bytes.fromhex(new_device_key.key) + if not Mnemonic(language="english").check(mnemonic_phrase): raise MnemonicError("Phrase is not mnemonic!") diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index ca8bc24..f664ecb 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -2,8 +2,12 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring -import datetime +from datetime import datetime +from typing import Optional +from pydantic import BaseModel import pytest +from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey +from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from tests.common import read_json from selfprivacy_api.repositories.tokens.json_tokens_repository import ( @@ -19,6 +23,83 @@ from selfprivacy_api.repositories.tokens.exceptions import ( ) +class TokenMock(BaseModel): + token: str + device_name: str + created_at: datetime + + @staticmethod + def generate(device_name: str) -> "Token": + return Token( + token="iamtoken", + device_name="imnew", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + +class NewDeviceKeyMock(BaseModel): + key: str + created_at: datetime + expires_at: datetime + + @staticmethod + def generate() -> "NewDeviceKey": + return NewDeviceKey( + key="imkey", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + +class RecoveryKeyMock(BaseModel): + key: str + created_at: datetime + expires_at: Optional[datetime] + uses_left: Optional[int] + + @staticmethod + def generate( + expiration: Optional[datetime], + uses_left: Optional[int], + ) -> "RecoveryKey": + return RecoveryKey( + key="imnewrecoverykey", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=None, + uses_left=1, + ) + + +@pytest.fixture +def mock_new_device_key_generate(mocker): + mock = mocker.patch( + "selfprivacy_api.repositories.tokens.json_tokens_repository.NewDeviceKey", + autospec=True, + return_value=NewDeviceKeyMock, + ) + return mock + 
+ +@pytest.fixture +def mock_token_generate(mocker): + mock = mocker.patch( + "selfprivacy_api.repositories.tokens.json_tokens_repository.Token", + autospec=True, + return_value=TokenMock, + ) + return mock + + +@pytest.fixture +def mock_recovery_key_generate(mocker): + mock = mocker.patch( + "selfprivacy_api.repositories.tokens.json_tokens_repository.RecoveryKey", + autospec=True, + return_value=RecoveryKeyMock, + ) + return mock + + @pytest.fixture def tokens(mocker, datadir): mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json") @@ -39,7 +120,7 @@ def test_get_token_by_token_string(tokens): assert repo.get_token_by_token_string(token_string="iamtoken") == Token( token="iamtoken", device_name="primary_token", - created_at=datetime.datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) @@ -57,7 +138,7 @@ def test_get_token_by_name(tokens): assert repo.get_token_by_name(token_name="primary_token") == Token( token="iamtoken", device_name="primary_token", - created_at=datetime.datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) @@ -76,6 +157,136 @@ def test_get_tokens(tokens): Token( token="iamtoken", device_name="primary_token", - created_at=datetime.datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) ] + + +def test_create_token(tokens, mock_token_generate): + repo = JsonTokensRepository() + assert repo.create_token(device_name="imnew") is not None + assert repo.create_token(device_name="imnew") == Token( + token="iamtoken", + device_name="imnew", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + +def test_delete_token(tokens, datadir): + repo = JsonTokensRepository() + input_token = Token( + token="iamtoken", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + assert repo.delete_token(input_token) is None + assert read_json(datadir / "tokens.json")["tokens"] == [] + + +def test_delete_not_found_token(tokens, datadir): + repo = JsonTokensRepository() + input_token = Token( + token="imbadtoken", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + with pytest.raises(TokenNotFoundError): + assert repo.delete_token(input_token) is None + + +def test_refresh_token(tokens, mock_token_generate): + repo = JsonTokensRepository() + input_token = Token( + token="imtoken", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + assert repo.refresh_token(input_token) is not None + assert repo.refresh_token(input_token) == Token( + token="iamtoken", + device_name="imnew", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + +def test_refresh_not_found_token(tokens, mock_token_generate): + repo = JsonTokensRepository() + input_token = Token( + token="idontknowwhoiam", + device_name="tellmewhoiam?", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + + with pytest.raises(TokenNotFoundError): + assert repo.refresh_token(input_token) is None + + +def test_get_recovery_key(tokens): + repo = JsonTokensRepository() + + assert repo.get_recovery_key() is not None + assert repo.get_recovery_key() == RecoveryKey( + key="iamtoken", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=None, + uses_left=None, + ) + + +def test_create_recovery_key(tokens, mock_recovery_key_generate, datadir): + repo = JsonTokensRepository() + + assert repo.create_recovery_key(uses_left=1, 
expiration=None) is not None + assert read_json(datadir / "tokens.json")["recovery_token"] == RecoveryKey( + key="imnewrecoverykey", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=None, + uses_left=1, + ) + + +def test_get_new_device_key(tokens, mock_new_device_key_generate, datadir): + repo = JsonTokensRepository() + + assert repo.get_new_device_key() is not None + # assert read_json(datadir / "tokens.json")["new_device"] == RecoveryKey( + # key="imrecoverykey", + # created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + # expires_at=None, + # uses_left=1, + # ) + + +# use_mnemonic_recovery_key +# use_mnemonic_new_device_key + + +def test_delete_new_device_key(tokens, datadir): + repo = JsonTokensRepository() + + assert repo.delete_new_device_key() is None + assert "new_device" not in read_json(datadir / "tokens.json") + + +#################################################### + + +def test_use_mnemonic_new_device_key( + tokens, mock_new_device_key_generate, datadir, mock_token_generate +): + repo = JsonTokensRepository() + + assert repo.use_mnemonic_new_device_key( + device_name="imnew", mnemonic_phrase="oh-no" + ) == Token( + token="iamtoken", + device_name="imnew", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + assert read_json(datadir / "tokens.json")["new_device"] == [] + + +def use_mnemonic_recovery_key(): + ... diff --git a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json index b8eaffd..bce68b0 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json +++ b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json @@ -8,7 +8,7 @@ ], "recovery_token": { "token": "iamtoken", - "date": "2022-09-24T17:54:07.607272", + "date": "2022-07-15 17:41:31.675698", "expiration": null, "uses_left": null }, diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index c36dcb2..7a65736 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -516,7 +516,6 @@ def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_ }, }, ) - assert response.status_code == 200 assert response.json().get("data") is not None assert response.json()["data"]["createUser"]["message"] is not None -- 2.42.0 From a0963c261cf7c4348647b10deee608fe80d534d7 Mon Sep 17 00:00:00 2001 From: def Date: Mon, 14 Nov 2022 06:19:25 +0400 Subject: [PATCH 07/12] test: i tried so hard and get so far --- .../test_repository/test_tokens_repository.py | 329 ++++++++++++------ .../test_tokens_repository/tokens.json | 10 +- 2 files changed, 228 insertions(+), 111 deletions(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index f664ecb..5bcb300 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -3,79 +3,74 @@ # pylint: disable=missing-function-docstring from datetime import datetime -from typing import Optional -from pydantic import BaseModel + import pytest + from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey from selfprivacy_api.models.tokens.recovery_key import RecoveryKey - -from tests.common import read_json +from selfprivacy_api.models.tokens.token import Token +from selfprivacy_api.repositories.tokens.exceptions import ( + MnemonicError, + RecoveryKeyIsNotValidError, + RecoveryKeyNotFoundError, + 
RecoveryTokenError, + TokenNotFoundError, +) from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, ) -from selfprivacy_api.models.tokens.token import Token -from selfprivacy_api.repositories.tokens.exceptions import ( - TokenNotFoundError, - RecoveryKeyNotFoundError, - MnemonicError, - RecoveryKeyIsNotValidError, - RecoveryTokenError, -) +from tests.common import read_json -class TokenMock(BaseModel): - token: str - device_name: str - created_at: datetime - - @staticmethod - def generate(device_name: str) -> "Token": - return Token( - token="iamtoken", - device_name="imnew", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) - - -class NewDeviceKeyMock(BaseModel): - key: str - created_at: datetime - expires_at: datetime - - @staticmethod - def generate() -> "NewDeviceKey": - return NewDeviceKey( - key="imkey", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) - - -class RecoveryKeyMock(BaseModel): - key: str - created_at: datetime - expires_at: Optional[datetime] - uses_left: Optional[int] - - @staticmethod - def generate( - expiration: Optional[datetime], - uses_left: Optional[int], - ) -> "RecoveryKey": - return RecoveryKey( - key="imnewrecoverykey", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - expires_at=None, - uses_left=1, - ) +class RecoveryKeyMockReturnNotValid: + def is_valid() -> bool: + return False @pytest.fixture def mock_new_device_key_generate(mocker): mock = mocker.patch( - "selfprivacy_api.repositories.tokens.json_tokens_repository.NewDeviceKey", + "selfprivacy_api.repositories.tokens.json_tokens_repository.NewDeviceKey.generate", autospec=True, - return_value=NewDeviceKeyMock, + return_value=NewDeviceKey( + key="43478d05b35e4781598acd76e33832bb", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ), + ) + return mock + + +@pytest.fixture +def mock_get_recovery_key_return_none(mocker): + mock = mocker.patch( + "selfprivacy_api.repositories.tokens.json_tokens_repository.JsonTokensRepository.get_recovery_key", + autospec=True, + return_value=None, + ) + return mock + + +@pytest.fixture +def mock_generate_token(mocker): + mock = mocker.patch( + "selfprivacy_api.repositories.tokens.json_tokens_repository.Token.generate", + autospec=True, + return_value=Token( + token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", + device_name="newdevice", + created_at=datetime(2022, 11, 14, 6, 6, 32, 777123), + ), + ) + return mock + + +@pytest.fixture +def mock_get_recovery_key_return_not_valid(mocker): + mock = mocker.patch( + "selfprivacy_api.repositories.tokens.json_tokens_repository.JsonTokensRepository.get_recovery_key", + autospec=True, + return_value=RecoveryKeyMockReturnNotValid, ) return mock @@ -83,9 +78,13 @@ def mock_new_device_key_generate(mocker): @pytest.fixture def mock_token_generate(mocker): mock = mocker.patch( - "selfprivacy_api.repositories.tokens.json_tokens_repository.Token", + "selfprivacy_api.repositories.tokens.json_tokens_repository.Token.generate", autospec=True, - return_value=TokenMock, + return_value=Token( + token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", + device_name="IamNewDevice", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ), ) return mock @@ -93,9 +92,14 @@ def mock_token_generate(mocker): @pytest.fixture def mock_recovery_key_generate(mocker): mock = mocker.patch( - "selfprivacy_api.repositories.tokens.json_tokens_repository.RecoveryKey", + 
"selfprivacy_api.repositories.tokens.json_tokens_repository.RecoveryKey.generate", autospec=True, - return_value=RecoveryKeyMock, + return_value=RecoveryKey( + key="889bf49c1d3199d71a2e704718772bd53a422020334db051", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + expires_at=None, + uses_left=1, + ), ) return mock @@ -105,7 +109,7 @@ def tokens(mocker, datadir): mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json") assert read_json(datadir / "tokens.json")["tokens"] == [ { - "token": "iamtoken", + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", "name": "primary_token", "date": "2022-07-15 17:41:31.675698", } @@ -116,9 +120,10 @@ def tokens(mocker, datadir): def test_get_token_by_token_string(tokens): repo = JsonTokensRepository() - assert repo.get_token_by_token_string(token_string="iamtoken") is not None - assert repo.get_token_by_token_string(token_string="iamtoken") == Token( - token="iamtoken", + assert repo.get_token_by_token_string( + token_string="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI" + ) == Token( + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", device_name="primary_token", created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) @@ -136,7 +141,7 @@ def test_get_token_by_name(tokens): assert repo.get_token_by_name(token_name="primary_token") is not None assert repo.get_token_by_name(token_name="primary_token") == Token( - token="iamtoken", + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", device_name="primary_token", created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) @@ -155,7 +160,7 @@ def test_get_tokens(tokens): assert repo.get_tokens() is not None assert repo.get_tokens() == [ Token( - token="iamtoken", + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", device_name="primary_token", created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) @@ -164,10 +169,10 @@ def test_get_tokens(tokens): def test_create_token(tokens, mock_token_generate): repo = JsonTokensRepository() - assert repo.create_token(device_name="imnew") is not None - assert repo.create_token(device_name="imnew") == Token( - token="iamtoken", - device_name="imnew", + + assert repo.create_token(device_name="IamNewDevice") == Token( + token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", + device_name="IamNewDevice", created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) @@ -175,7 +180,7 @@ def test_create_token(tokens, mock_token_generate): def test_delete_token(tokens, datadir): repo = JsonTokensRepository() input_token = Token( - token="iamtoken", + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", device_name="primary_token", created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) @@ -198,15 +203,14 @@ def test_delete_not_found_token(tokens, datadir): def test_refresh_token(tokens, mock_token_generate): repo = JsonTokensRepository() input_token = Token( - token="imtoken", + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", device_name="primary_token", created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) - assert repo.refresh_token(input_token) is not None assert repo.refresh_token(input_token) == Token( - token="iamtoken", - device_name="imnew", + token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", + device_name="IamNewDevice", created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) @@ -228,10 +232,10 @@ def test_get_recovery_key(tokens): assert repo.get_recovery_key() is not None assert repo.get_recovery_key() == RecoveryKey( - key="iamtoken", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + 
key="ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", + created_at=datetime(2022, 11, 11, 11, 48, 54, 228038), expires_at=None, - uses_left=None, + uses_left=2, ) @@ -239,28 +243,23 @@ def test_create_recovery_key(tokens, mock_recovery_key_generate, datadir): repo = JsonTokensRepository() assert repo.create_recovery_key(uses_left=1, expiration=None) is not None - assert read_json(datadir / "tokens.json")["recovery_token"] == RecoveryKey( - key="imnewrecoverykey", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - expires_at=None, - uses_left=1, - ) + assert read_json(datadir / "tokens.json")["recovery_token"] == { + "token": "889bf49c1d3199d71a2e704718772bd53a422020334db051", + "date": "2022-11-11T11:48:54.228038", + "expiration": None, + "uses_left": 1, + } # проблемы, файл не изменяется. не представляю причину def test_get_new_device_key(tokens, mock_new_device_key_generate, datadir): repo = JsonTokensRepository() assert repo.get_new_device_key() is not None - # assert read_json(datadir / "tokens.json")["new_device"] == RecoveryKey( - # key="imrecoverykey", - # created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - # expires_at=None, - # uses_left=1, - # ) - - -# use_mnemonic_recovery_key -# use_mnemonic_new_device_key + assert read_json(datadir / "tokens.json")["new_device"] == { + "data": "2022-07-15 17:41:31.675698", + "expiration": "2022-07-15 17:41:31.675698", + "token": "43478d05b35e4781598acd76e33832bb", + } def test_delete_new_device_key(tokens, datadir): @@ -273,20 +272,138 @@ def test_delete_new_device_key(tokens, datadir): #################################################### +def test_use_bad_mnemonic_phrase_new_device_key( + tokens, mock_new_device_key_generate, datadir, mock_token_generate +): + repo = JsonTokensRepository() + + with pytest.raises(MnemonicError): + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="oh-no", + ) + is None + ) + + +def test_use_not_exists_mnemonic_new_device_key( + tokens, mock_new_device_key_generate, datadir, mock_token_generate +): + repo = JsonTokensRepository() + + with pytest.raises(TokenNotFoundError): + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park", + ) + is None + ) + + def test_use_mnemonic_new_device_key( tokens, mock_new_device_key_generate, datadir, mock_token_generate ): repo = JsonTokensRepository() - assert repo.use_mnemonic_new_device_key( - device_name="imnew", mnemonic_phrase="oh-no" - ) == Token( - token="iamtoken", - device_name="imnew", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + ) + is not None ) - assert read_json(datadir / "tokens.json")["new_device"] == [] + # assert read_json(datadir / "tokens.json")["new_device"] == [] -def use_mnemonic_recovery_key(): - ... 
+def test_use_none_mnemonic_recovery_key( + datadir, tokens, mock_get_recovery_key_return_none +): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryKeyNotFoundError): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="i love you", + device_name="primary_token", + ) + is None + ) + + +def test_use_mnemonic_not_valid_recovery_key( + datadir, tokens, mock_get_recovery_key_return_not_valid +): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryKeyIsNotValidError): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="sorry, it was joke", + device_name="primary_token", + ) + is None + ) + + +def test_use_not_mnemonic_recovery_key( + datadir, + tokens, +): + repo = JsonTokensRepository() + + with pytest.raises(MnemonicError): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="please come back", + device_name="primary_token", + ) + is None + ) + + +def test_use_not_found_mnemonic_recovery_key(datadir, tokens): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryTokenError): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + device_name="primary_token", + ) + is None + ) + + +def test_use_mnemonic_recovery_key(datadir, tokens, mock_generate_token): + repo = JsonTokensRepository() + + assert repo.use_mnemonic_recovery_key( + mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park", + device_name="newdevice", + ) == Token( + token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", + device_name="newdevice", + created_at=datetime(2022, 11, 14, 6, 6, 32, 777123), + ) + + assert read_json(datadir / "tokens.json")["tokens"] == [ + { + "date": "2022-07-15 17:41:31.675698", + "name": "primary_token", + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + }, + { + "date": "2022-11-14 06:06:32.777123", + "name": "newdevice", + "token": "ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", + }, + ] + + assert read_json(datadir / "tokens.json")["recovery_token"] == { + "date": "2022-11-11T11:48:54.228038", + "expiration": None, + "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", + "uses_left": 1, + } diff --git a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json index bce68b0..a250a29 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json +++ b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json @@ -1,19 +1,19 @@ { "tokens": [ { - "token": "iamtoken", + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", "name": "primary_token", "date": "2022-07-15 17:41:31.675698" } ], "recovery_token": { - "token": "iamtoken", - "date": "2022-07-15 17:41:31.675698", + "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", + "date": "2022-11-11T11:48:54.228038", "expiration": null, - "uses_left": null + "uses_left": 2 }, "new_device": { - "token": "iamtoken", + "token": "2237238de23dc71ab558e317bdb8ff8e", "date": "2022-10-26 20:50:47.973212", "expiration": "2022-10-26 21:00:47.974153" } -- 2.42.0 From c2f18e9439632e9596aafcc0a22c9dbda209c8b2 Mon Sep 17 00:00:00 2001 From: def Date: Tue, 15 Nov 2022 16:21:18 +0400 Subject: [PATCH 08/12] pity --- .../tokens/json_tokens_repository.py | 36 ++++++++++++------- .../test_repository/test_tokens_repository.py | 6 ++-- 2 files changed, 26 insertions(+), 16 deletions(-) diff --git 
a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py
index 938d419..5f04eea 100644
--- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py
+++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py
@@ -99,7 +99,9 @@ class JsonTokensRepository(AbstractTokensRepository):
 
                 if userdata_token["name"] == input_token.device_name:
                     userdata_token["token"] = new_token.token
-                    userdata_token["data"] = new_token.created_at
+                    userdata_token["date"] = (
+                        new_token.created_at.strftime("%Y-%m-%d %H:%M:%S.%f"),
+                    )
 
                     return new_token
 
@@ -128,12 +130,12 @@ class JsonTokensRepository(AbstractTokensRepository):
     ) -> RecoveryKey:
         """Create the recovery key"""
 
-        recovery_key = RecoveryKey.generate(expiration=expiration, uses_left=uses_left)
+        recovery_key = RecoveryKey.generate(expiration, uses_left)
 
-        with ReadUserData(UserDataFiles.TOKENS) as tokens_file:
-            tokens_file["recovery_key"] = {
+        with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
+            tokens_file["recovery_token"] = {
                 "token": recovery_key.key,
-                "date": recovery_key.created_at,
+                "date": recovery_key.created_at.strftime("%Y-%m-%d %H:%M:%S.%f"),
                 "expiration": recovery_key.expires_at,
                 "uses_left": recovery_key.uses_left,
             }
@@ -159,7 +161,7 @@ class JsonTokensRepository(AbstractTokensRepository):
         phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase)
 
         if phrase_bytes != recovery_token:
-            raise RecoveryTokenError("Phrase is not (?) recovery token")
+            raise RecoveryTokenError("Phrase is not recovery token")
 
         new_token = Token.generate(device_name=device_name)
 
@@ -168,7 +170,7 @@ class JsonTokensRepository(AbstractTokensRepository):
                 {
                     "token": new_token.token,
                     "name": new_token.device_name,
-                    "date": new_token.created_at,
+                    "date": new_token.created_at.strftime("%Y-%m-%d %H:%M:%S.%f"),
                 }
             )
 
@@ -187,8 +189,10 @@ class JsonTokensRepository(AbstractTokensRepository):
         with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
             tokens_file["new_device"] = {
                 "token": new_device_key.key,
-                "data": new_device_key.created_at,
-                "expiration": new_device_key.expires_at,
+                "data": new_device_key.created_at.strftime("%Y-%m-%d %H:%M:%S.%f"),
+                "expiration": new_device_key.expires_at.strftime(
+                    "%Y-%m-%d %H:%M:%S.%f"
+                ),
             }
 
         return new_device_key
@@ -206,15 +210,21 @@ class JsonTokensRepository(AbstractTokensRepository):
         self, mnemonic_phrase: str, device_name: str
     ) -> Token:
         """Use the mnemonic new device key"""
-        new_device_key = NewDeviceKey.generate()
+        new_device_key = self.get_new_device_key()
+
+        if new_device_key is None:
+            raise TokenNotFoundError("New device key not found!")
+
         token = bytes.fromhex(new_device_key.key)
-
+
         if not Mnemonic(language="english").check(mnemonic_phrase):
             raise MnemonicError("Phrase is not mnemonic!")
 
         phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase)
-        if phrase_bytes != token:
-            raise MnemonicError("Phrase is not token!")
+        if bytes(phrase_bytes) != bytes(
+            token
+        ):  # idk why, but it doesn't work, even though I kept the original logic
+            raise TokenNotFoundError("Phrase is not token!")
 
         new_token = Token.generate(device_name=device_name)
         with WriteUserData(UserDataFiles.TOKENS) as tokens:
diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py
index 5bcb300..7566372 100644
--- a/tests/test_graphql/test_repository/test_tokens_repository.py
+++ b/tests/test_graphql/test_repository/test_tokens_repository.py
@@ -245,10 +245,10 @@
def test_create_recovery_key(tokens, mock_recovery_key_generate, datadir):
     assert repo.create_recovery_key(uses_left=1, expiration=None) is not None
     assert read_json(datadir / "tokens.json")["recovery_token"] == {
         "token": "889bf49c1d3199d71a2e704718772bd53a422020334db051",
-        "date": "2022-11-11T11:48:54.228038",
+        "date": "2022-07-15 17:41:31.675698",
         "expiration": None,
         "uses_left": 1,
-    }  # problems: the file does not change. I can't figure out why
+    }
 
 
 def test_get_new_device_key(tokens, mock_new_device_key_generate, datadir):
@@ -256,7 +256,7 @@ def test_get_new_device_key(tokens, mock_new_device_key_generate, datadir):
 
     assert repo.get_new_device_key() is not None
     assert read_json(datadir / "tokens.json")["new_device"] == {
-        "data": "2022-07-15 17:41:31.675698",
+        "date": "2022-07-15 17:41:31.675698",
         "expiration": "2022-07-15 17:41:31.675698",
         "token": "43478d05b35e4781598acd76e33832bb",
     }
-- 
2.42.0

From c25bf44dc7decd6fa3f77e11101938b512b6888d Mon Sep 17 00:00:00 2001
From: def
Date: Tue, 15 Nov 2022 20:29:16 +0400
Subject: [PATCH 09/12] fix new device

---
 .../repositories/tokens/json_tokens_repository.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py
index 5f04eea..6b656ff 100644
--- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py
+++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py
@@ -210,7 +210,13 @@ class JsonTokensRepository(AbstractTokensRepository):
         self, mnemonic_phrase: str, device_name: str
     ) -> Token:
         """Use the mnemonic new device key"""
-        new_device_key = self.get_new_device_key()
+
+        with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
+            new_device_key = NewDeviceKey(
+                key=tokens_file["new_device"]["token"],
+                created_at=tokens_file["new_device"]["date"],
+                expires_at=tokens_file["new_device"]["expiration"],
+            )
 
         if new_device_key is None:
             raise TokenNotFoundError("New device key not found!")
-- 
2.42.0

From 89b6c3e32ee7a121214dc1d3a9b23173862ec44a Mon Sep 17 00:00:00 2001
From: Inex Code
Date: Wed, 16 Nov 2022 13:25:29 +0300
Subject: [PATCH 10/12] fix(auth): Typo in tokens repository

---
 selfprivacy_api/repositories/tokens/json_tokens_repository.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py
index 6b656ff..85a27b3 100644
--- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py
+++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py
@@ -189,7 +189,7 @@ class JsonTokensRepository(AbstractTokensRepository):
         with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
             tokens_file["new_device"] = {
                 "token": new_device_key.key,
-                "data": new_device_key.created_at.strftime("%Y-%m-%d %H:%M:%S.%f"),
+                "date": new_device_key.created_at.strftime("%Y-%m-%d %H:%M:%S.%f"),
                 "expiration": new_device_key.expires_at.strftime(
                     "%Y-%m-%d %H:%M:%S.%f"
                 ),
-- 
2.42.0

From b31c06a0f78c09db82afaf1ab04258d1363a90a7 Mon Sep 17 00:00:00 2001
From: Inex Code
Date: Wed, 16 Nov 2022 13:58:41 +0300
Subject: [PATCH 11/12] fix(tokens): datetime formatting

---
 .../tokens/json_tokens_repository.py          | 16 ++--
 .../test_repository/test_tokens_repository.py | 77 +++++++++++++++++--
 .../test_tokens_repository/tokens.json        | 17 +++-
 3 files changed, 93 insertions(+), 17 deletions(-)

diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py
b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 85a27b3..7cb5296 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -20,6 +20,8 @@ from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( ) from mnemonic import Mnemonic +DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" + class JsonTokensRepository(AbstractTokensRepository): def get_token_by_token_string(self, token_string: str) -> Optional[Token]: @@ -75,7 +77,7 @@ class JsonTokensRepository(AbstractTokensRepository): { "token": new_token.token, "name": new_token.device_name, - "date": new_token.created_at.strftime("%Y-%m-%d %H:%M:%S.%f"), + "date": new_token.created_at.strftime(DATETIME_FORMAT), } ) return new_token @@ -100,7 +102,7 @@ class JsonTokensRepository(AbstractTokensRepository): if userdata_token["name"] == input_token.device_name: userdata_token["token"] = new_token.token userdata_token["date"] = ( - new_token.created_at.strftime("%Y-%m-%d %H:%M:%S.%f"), + new_token.created_at.strftime(DATETIME_FORMAT), ) return new_token @@ -135,7 +137,7 @@ class JsonTokensRepository(AbstractTokensRepository): with WriteUserData(UserDataFiles.TOKENS) as tokens_file: tokens_file["recovery_token"] = { "token": recovery_key.key, - "date": recovery_key.created_at.strftime("%Y-%m-%d %H:%M:%S.%f"), + "date": recovery_key.created_at.strftime(DATETIME_FORMAT), "expiration": recovery_key.expires_at, "uses_left": recovery_key.uses_left, } @@ -170,7 +172,7 @@ class JsonTokensRepository(AbstractTokensRepository): { "token": new_token.token, "name": new_token.device_name, - "date": new_token.created_at.strftime("%Y-%m-%d %H:%M:%S.%f"), + "date": new_token.created_at.strftime(DATETIME_FORMAT), } ) @@ -189,10 +191,8 @@ class JsonTokensRepository(AbstractTokensRepository): with WriteUserData(UserDataFiles.TOKENS) as tokens_file: tokens_file["new_device"] = { "token": new_device_key.key, - "date": new_device_key.created_at.strftime("%Y-%m-%d %H:%M:%S.%f"), - "expiration": new_device_key.expires_at.strftime( - "%Y-%m-%d %H:%M:%S.%f" - ), + "date": new_device_key.created_at.strftime(DATETIME_FORMAT), + "expiration": new_device_key.expires_at.strftime(DATETIME_FORMAT), } return new_device_key diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 7566372..8bdf1fb 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -2,7 +2,7 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring -from datetime import datetime +from datetime import datetime, timezone import pytest @@ -112,7 +112,22 @@ def tokens(mocker, datadir): "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", "name": "primary_token", "date": "2022-07-15 17:41:31.675698", - } + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z", + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z", + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698", + }, ] return datadir @@ -163,7 +178,22 @@ def test_get_tokens(tokens): token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", device_name="primary_token", created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) + 
), + Token( + token="3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + device_name="second_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698, tzinfo=timezone.utc), + ), + Token( + token="LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + device_name="third_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698, tzinfo=timezone.utc), + ), + Token( + token="dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + device_name="forth_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ), ] @@ -186,7 +216,23 @@ def test_delete_token(tokens, datadir): ) assert repo.delete_token(input_token) is None - assert read_json(datadir / "tokens.json")["tokens"] == [] + assert read_json(datadir / "tokens.json")["tokens"] == [ + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z", + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z", + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698", + }, + ] def test_delete_not_found_token(tokens, datadir): @@ -245,7 +291,7 @@ def test_create_recovery_key(tokens, mock_recovery_key_generate, datadir): assert repo.create_recovery_key(uses_left=1, expiration=None) is not None assert read_json(datadir / "tokens.json")["recovery_token"] == { "token": "889bf49c1d3199d71a2e704718772bd53a422020334db051", - "date": "2022-07-15 17:41:31.675698", + "date": "2022-07-15T17:41:31.675698", "expiration": None, "uses_left": 1, } @@ -256,8 +302,8 @@ def test_get_new_device_key(tokens, mock_new_device_key_generate, datadir): assert repo.get_new_device_key() is not None assert read_json(datadir / "tokens.json")["new_device"] == { - "date": "2022-07-15 17:41:31.675698", - "expiration": "2022-07-15 17:41:31.675698", + "date": "2022-07-15T17:41:31.675698", + "expiration": "2022-07-15T17:41:31.675698", "token": "43478d05b35e4781598acd76e33832bb", } @@ -395,7 +441,22 @@ def test_use_mnemonic_recovery_key(datadir, tokens, mock_generate_token): "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", }, { - "date": "2022-11-14 06:06:32.777123", + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z", + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z", + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698", + }, + { + "date": "2022-11-14T06:06:32.777123", "name": "newdevice", "token": "ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", }, diff --git a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json index a250a29..bb1805c 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json +++ b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json @@ -4,6 +4,21 @@ "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", "name": "primary_token", "date": "2022-07-15 17:41:31.675698" + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z" + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z" + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": 
"forth_token", + "date": "2022-07-15T17:41:31.675698" } ], "recovery_token": { @@ -17,4 +32,4 @@ "date": "2022-10-26 20:50:47.973212", "expiration": "2022-10-26 21:00:47.974153" } -} \ No newline at end of file +} -- 2.42.0 From 042f2b231034745b6398c45766cb38b6a91b10ab Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 16 Nov 2022 20:08:05 +0300 Subject: [PATCH 12/12] fix(tokens): normalize exceptions, fix KeyErrors --- .../repositories/tokens/exceptions.py | 14 +- .../tokens/json_tokens_repository.py | 50 +- .../test_repository/test_tokens_repository.py | 450 +++++++++++------- .../test_tokens_repository/empty_keys.json | 9 + .../test_tokens_repository/null_keys.json | 26 + 5 files changed, 345 insertions(+), 204 deletions(-) create mode 100644 tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json create mode 100644 tests/test_graphql/test_repository/test_tokens_repository/null_keys.json diff --git a/selfprivacy_api/repositories/tokens/exceptions.py b/selfprivacy_api/repositories/tokens/exceptions.py index d5049b4..6b419c7 100644 --- a/selfprivacy_api/repositories/tokens/exceptions.py +++ b/selfprivacy_api/repositories/tokens/exceptions.py @@ -1,18 +1,14 @@ -class TokenNotFoundError(Exception): +class TokenNotFound(Exception): """Token not found!""" -class RecoveryKeyNotFoundError(Exception): +class RecoveryKeyNotFound(Exception): """Recovery key not found!""" -class MnemonicError(Exception): +class InvalidMnemonic(Exception): """Phrase is not mnemonic!""" -class RecoveryKeyIsNotValidError(Exception): - """Recovery key is not valid!""" - - -class RecoveryTokenError(Exception): - """Error ???""" +class NewDeviceKeyNotFound(Exception): + """New device key not found!""" diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 7cb5296..aad3158 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -3,22 +3,21 @@ temporary legacy """ from typing import Optional from datetime import datetime +from mnemonic import Mnemonic from selfprivacy_api.utils import UserDataFiles, WriteUserData, ReadUserData from selfprivacy_api.models.tokens.token import Token from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey from selfprivacy_api.repositories.tokens.exceptions import ( - TokenNotFoundError, - RecoveryKeyNotFoundError, - MnemonicError, - RecoveryKeyIsNotValidError, - RecoveryTokenError, + TokenNotFound, + RecoveryKeyNotFound, + InvalidMnemonic, + NewDeviceKeyNotFound, ) from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, ) -from mnemonic import Mnemonic DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" @@ -36,7 +35,7 @@ class JsonTokensRepository(AbstractTokensRepository): created_at=userdata_token["date"], ) - raise TokenNotFoundError("Token not found!") + raise TokenNotFound("Token not found!") def get_token_by_name(self, token_name: str) -> Optional[Token]: """Get the token by name""" @@ -50,7 +49,7 @@ class JsonTokensRepository(AbstractTokensRepository): created_at=userdata_token["date"], ) - raise TokenNotFoundError("Token not found!") + raise TokenNotFound("Token not found!") def get_tokens(self) -> list[Token]: """Get the tokens""" @@ -90,7 +89,7 @@ class JsonTokensRepository(AbstractTokensRepository): tokens_file["tokens"].remove(userdata_token) return - raise 
TokenNotFoundError("Token not found!") + raise TokenNotFound("Token not found!") def refresh_token(self, input_token: Token) -> Token: """Change the token field of the existing token""" @@ -107,13 +106,16 @@ class JsonTokensRepository(AbstractTokensRepository): return new_token - raise TokenNotFoundError("Token not found!") + raise TokenNotFound("Token not found!") def get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" with ReadUserData(UserDataFiles.TOKENS) as tokens_file: - if tokens_file["recovery_token"] is None: + if ( + "recovery_token" not in tokens_file + or tokens_file["recovery_token"] is None + ): return recovery_key = RecoveryKey( @@ -151,19 +153,19 @@ class JsonTokensRepository(AbstractTokensRepository): recovery_key = self.get_recovery_key() if recovery_key is None: - raise RecoveryKeyNotFoundError("Recovery key is None!") + raise RecoveryKeyNotFound("Recovery key not found") if not recovery_key.is_valid(): - raise RecoveryKeyIsNotValidError("Recovery key is not valid!") + raise RecoveryKeyNotFound("Recovery key not found") recovery_token = bytes.fromhex(recovery_key.key) if not Mnemonic(language="english").check(mnemonic_phrase): - raise MnemonicError("Phrase is not mnemonic!") + raise InvalidMnemonic("Phrase is not mnemonic!") phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) if phrase_bytes != recovery_token: - raise RecoveryTokenError("Phrase is not recovery token") + raise RecoveryKeyNotFound("Recovery key not found") new_token = Token.generate(device_name=device_name) @@ -204,33 +206,29 @@ class JsonTokensRepository(AbstractTokensRepository): del tokens_file["new_device"] return - raise TokenNotFoundError("Key not found!") - def use_mnemonic_new_device_key( self, mnemonic_phrase: str, device_name: str ) -> Token: """Use the mnemonic new device key""" - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: + with ReadUserData(UserDataFiles.TOKENS) as tokens_file: + if "new_device" not in tokens_file or tokens_file["new_device"] is None: + raise NewDeviceKeyNotFound("New device key not found") + new_device_key = NewDeviceKey( key=tokens_file["new_device"]["token"], created_at=tokens_file["new_device"]["date"], expires_at=tokens_file["new_device"]["expiration"], ) - if new_device_key is None: - raise TokenNotFoundError("New device key not found!") - token = bytes.fromhex(new_device_key.key) if not Mnemonic(language="english").check(mnemonic_phrase): - raise MnemonicError("Phrase is not mnemonic!") + raise InvalidMnemonic("Phrase is not mnemonic!") phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase) - if bytes(phrase_bytes) != bytes( - token - ): # idk why, но оно не робит, хотя оригинальную логику я сохранил - raise TokenNotFoundError("Phrase is not token!") + if bytes(phrase_bytes) != bytes(token): + raise NewDeviceKeyNotFound("Phrase is not token!") new_token = Token.generate(device_name=device_name) with WriteUserData(UserDataFiles.TOKENS) as tokens: diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 8bdf1fb..878e242 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -10,11 +10,10 @@ from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from selfprivacy_api.models.tokens.token import Token from 
selfprivacy_api.repositories.tokens.exceptions import ( - MnemonicError, - RecoveryKeyIsNotValidError, - RecoveryKeyNotFoundError, - RecoveryTokenError, - TokenNotFoundError, + InvalidMnemonic, + RecoveryKeyNotFound, + TokenNotFound, + NewDeviceKeyNotFound, ) from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, @@ -22,6 +21,58 @@ from selfprivacy_api.repositories.tokens.json_tokens_repository import ( from tests.common import read_json +ORIGINAL_TOKEN_CONTENT = [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698", + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z", + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z", + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698", + }, +] + + +@pytest.fixture +def tokens(mocker, datadir): + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json") + assert read_json(datadir / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT + return datadir + + +@pytest.fixture +def empty_keys(mocker, datadir): + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "empty_keys.json") + assert read_json(datadir / "empty_keys.json")["tokens"] == [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698", + } + ] + return datadir + + +@pytest.fixture +def null_keys(mocker, datadir): + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json") + assert read_json(datadir / "null_keys.json")["recovery_token"] is None + assert read_json(datadir / "null_keys.json")["new_device"] is None + return datadir + + class RecoveryKeyMockReturnNotValid: def is_valid() -> bool: return False @@ -41,16 +92,6 @@ def mock_new_device_key_generate(mocker): return mock -@pytest.fixture -def mock_get_recovery_key_return_none(mocker): - mock = mocker.patch( - "selfprivacy_api.repositories.tokens.json_tokens_repository.JsonTokensRepository.get_recovery_key", - autospec=True, - return_value=None, - ) - return mock - - @pytest.fixture def mock_generate_token(mocker): mock = mocker.patch( @@ -104,32 +145,9 @@ def mock_recovery_key_generate(mocker): return mock -@pytest.fixture -def tokens(mocker, datadir): - mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json") - assert read_json(datadir / "tokens.json")["tokens"] == [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698", - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z", - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z", - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698", - }, - ] - return datadir +############### +# Test tokens # +############### def test_get_token_by_token_string(tokens): @@ -147,7 +165,7 @@ def test_get_token_by_token_string(tokens): def test_get_token_by_non_existent_token_string(tokens): repo = JsonTokensRepository() - with pytest.raises(TokenNotFoundError): + with pytest.raises(TokenNotFound): assert 
repo.get_token_by_token_string(token_string="iamBadtoken") is None @@ -165,14 +183,13 @@ def test_get_token_by_name(tokens): def test_get_token_by_non_existent_name(tokens): repo = JsonTokensRepository() - with pytest.raises(TokenNotFoundError): + with pytest.raises(TokenNotFound): assert repo.get_token_by_name(token_name="badname") is None def test_get_tokens(tokens): repo = JsonTokensRepository() - assert repo.get_tokens() is not None assert repo.get_tokens() == [ Token( token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", @@ -197,6 +214,18 @@ def test_get_tokens(tokens): ] +def test_get_tokens_when_one(empty_keys): + repo = JsonTokensRepository() + + assert repo.get_tokens() == [ + Token( + token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + device_name="primary_token", + created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + ) + ] + + def test_create_token(tokens, mock_token_generate): repo = JsonTokensRepository() @@ -207,7 +236,7 @@ def test_create_token(tokens, mock_token_generate): ) -def test_delete_token(tokens, datadir): +def test_delete_token(tokens): repo = JsonTokensRepository() input_token = Token( token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", @@ -215,8 +244,8 @@ def test_delete_token(tokens, datadir): created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) - assert repo.delete_token(input_token) is None - assert read_json(datadir / "tokens.json")["tokens"] == [ + repo.delete_token(input_token) + assert read_json(tokens / "tokens.json")["tokens"] == [ { "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", "name": "second_token", @@ -235,16 +264,18 @@ def test_delete_token(tokens, datadir): ] -def test_delete_not_found_token(tokens, datadir): +def test_delete_not_found_token(tokens): repo = JsonTokensRepository() input_token = Token( token="imbadtoken", device_name="primary_token", created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) - with pytest.raises(TokenNotFoundError): + with pytest.raises(TokenNotFound): assert repo.delete_token(input_token) is None + assert read_json(tokens / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT + def test_refresh_token(tokens, mock_token_generate): repo = JsonTokensRepository() @@ -269,14 +300,18 @@ def test_refresh_not_found_token(tokens, mock_token_generate): created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), ) - with pytest.raises(TokenNotFoundError): + with pytest.raises(TokenNotFound): assert repo.refresh_token(input_token) is None +################ +# Recovery key # +################ + + def test_get_recovery_key(tokens): repo = JsonTokensRepository() - assert repo.get_recovery_key() is not None assert repo.get_recovery_key() == RecoveryKey( key="ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", created_at=datetime(2022, 11, 11, 11, 48, 54, 228038), @@ -285,11 +320,17 @@ def test_get_recovery_key(tokens): ) -def test_create_recovery_key(tokens, mock_recovery_key_generate, datadir): +def test_get_recovery_key_when_empty(empty_keys): + repo = JsonTokensRepository() + + assert repo.get_recovery_key() is None + + +def test_create_recovery_key(tokens, mock_recovery_key_generate): repo = JsonTokensRepository() assert repo.create_recovery_key(uses_left=1, expiration=None) is not None - assert read_json(datadir / "tokens.json")["recovery_token"] == { + assert read_json(tokens / "tokens.json")["recovery_token"] == { "token": "889bf49c1d3199d71a2e704718772bd53a422020334db051", "date": "2022-07-15T17:41:31.675698", "expiration": None, @@ -297,122 +338,12 @@ def test_create_recovery_key(tokens, mock_recovery_key_generate, 
datadir): } -def test_get_new_device_key(tokens, mock_new_device_key_generate, datadir): - repo = JsonTokensRepository() - - assert repo.get_new_device_key() is not None - assert read_json(datadir / "tokens.json")["new_device"] == { - "date": "2022-07-15T17:41:31.675698", - "expiration": "2022-07-15T17:41:31.675698", - "token": "43478d05b35e4781598acd76e33832bb", - } - - -def test_delete_new_device_key(tokens, datadir): - repo = JsonTokensRepository() - - assert repo.delete_new_device_key() is None - assert "new_device" not in read_json(datadir / "tokens.json") - - -#################################################### - - -def test_use_bad_mnemonic_phrase_new_device_key( - tokens, mock_new_device_key_generate, datadir, mock_token_generate +def test_use_mnemonic_recovery_key_when_empty( + empty_keys, mock_recovery_key_generate, mock_token_generate ): repo = JsonTokensRepository() - with pytest.raises(MnemonicError): - assert ( - repo.use_mnemonic_new_device_key( - device_name="imnew", - mnemonic_phrase="oh-no", - ) - is None - ) - - -def test_use_not_exists_mnemonic_new_device_key( - tokens, mock_new_device_key_generate, datadir, mock_token_generate -): - repo = JsonTokensRepository() - - with pytest.raises(TokenNotFoundError): - assert ( - repo.use_mnemonic_new_device_key( - device_name="imnew", - mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park", - ) - is None - ) - - -def test_use_mnemonic_new_device_key( - tokens, mock_new_device_key_generate, datadir, mock_token_generate -): - repo = JsonTokensRepository() - - assert ( - repo.use_mnemonic_new_device_key( - device_name="imnew", - mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", - ) - is not None - ) - # assert read_json(datadir / "tokens.json")["new_device"] == [] - - -def test_use_none_mnemonic_recovery_key( - datadir, tokens, mock_get_recovery_key_return_none -): - repo = JsonTokensRepository() - - with pytest.raises(RecoveryKeyNotFoundError): - assert ( - repo.use_mnemonic_recovery_key( - mnemonic_phrase="i love you", - device_name="primary_token", - ) - is None - ) - - -def test_use_mnemonic_not_valid_recovery_key( - datadir, tokens, mock_get_recovery_key_return_not_valid -): - repo = JsonTokensRepository() - - with pytest.raises(RecoveryKeyIsNotValidError): - assert ( - repo.use_mnemonic_recovery_key( - mnemonic_phrase="sorry, it was joke", - device_name="primary_token", - ) - is None - ) - - -def test_use_not_mnemonic_recovery_key( - datadir, - tokens, -): - repo = JsonTokensRepository() - - with pytest.raises(MnemonicError): - assert ( - repo.use_mnemonic_recovery_key( - mnemonic_phrase="please come back", - device_name="primary_token", - ) - is None - ) - - -def test_use_not_found_mnemonic_recovery_key(datadir, tokens): - repo = JsonTokensRepository() - - with pytest.raises(RecoveryTokenError): + with pytest.raises(RecoveryKeyNotFound): assert ( repo.use_mnemonic_recovery_key( mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", @@ -422,7 +353,87 @@ def test_use_not_found_mnemonic_recovery_key(datadir, tokens): ) -def test_use_mnemonic_recovery_key(datadir, tokens, mock_generate_token): +def test_use_mnemonic_not_valid_recovery_key( + tokens, mock_get_recovery_key_return_not_valid +): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryKeyNotFound): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="captain ribbon 
toddler settle symbol minute step broccoli bless universe divide bulb", + device_name="primary_token", + ) + is None + ) + + +def test_use_mnemonic_not_mnemonic_recovery_key(tokens): + repo = JsonTokensRepository() + + with pytest.raises(InvalidMnemonic): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="sorry, it was joke", + device_name="primary_token", + ) + is None + ) + + +def test_use_not_mnemonic_recovery_key(tokens): + repo = JsonTokensRepository() + + with pytest.raises(InvalidMnemonic): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="please come back", + device_name="primary_token", + ) + is None + ) + + +def test_use_not_found_mnemonic_recovery_key(tokens): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryKeyNotFound): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + device_name="primary_token", + ) + is None + ) + + +def test_use_menemonic_recovery_key_when_empty(empty_keys): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryKeyNotFound): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + device_name="primary_token", + ) + is None + ) + + +def test_use_menemonic_recovery_key_when_null(null_keys): + repo = JsonTokensRepository() + + with pytest.raises(RecoveryKeyNotFound): + assert ( + repo.use_mnemonic_recovery_key( + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + device_name="primary_token", + ) + is None + ) + + +def test_use_mnemonic_recovery_key(tokens, mock_generate_token): repo = JsonTokensRepository() assert repo.use_mnemonic_recovery_key( @@ -434,7 +445,7 @@ def test_use_mnemonic_recovery_key(datadir, tokens, mock_generate_token): created_at=datetime(2022, 11, 14, 6, 6, 32, 777123), ) - assert read_json(datadir / "tokens.json")["tokens"] == [ + assert read_json(tokens / "tokens.json")["tokens"] == [ { "date": "2022-07-15 17:41:31.675698", "name": "primary_token", @@ -462,9 +473,110 @@ def test_use_mnemonic_recovery_key(datadir, tokens, mock_generate_token): }, ] - assert read_json(datadir / "tokens.json")["recovery_token"] == { + assert read_json(tokens / "tokens.json")["recovery_token"] == { "date": "2022-11-11T11:48:54.228038", "expiration": None, "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", "uses_left": 1, } + + +################## +# New device key # +################## + + +def test_get_new_device_key(tokens, mock_new_device_key_generate): + repo = JsonTokensRepository() + + assert repo.get_new_device_key() is not None + assert read_json(tokens / "tokens.json")["new_device"] == { + "date": "2022-07-15T17:41:31.675698", + "expiration": "2022-07-15T17:41:31.675698", + "token": "43478d05b35e4781598acd76e33832bb", + } + + +def test_delete_new_device_key(tokens): + repo = JsonTokensRepository() + + assert repo.delete_new_device_key() is None + assert "new_device" not in read_json(tokens / "tokens.json") + + +def test_delete_new_device_key_when_empty(empty_keys): + repo = JsonTokensRepository() + + repo.delete_new_device_key() + assert "new_device" not in read_json(empty_keys / "empty_keys.json") + + +def test_use_invalid_mnemonic_new_device_key( + tokens, mock_new_device_key_generate, datadir, mock_token_generate +): + repo = JsonTokensRepository() + + with pytest.raises(InvalidMnemonic): + assert ( + repo.use_mnemonic_new_device_key( + 
device_name="imnew", + mnemonic_phrase="oh-no", + ) + is None + ) + + +def test_use_not_exists_mnemonic_new_device_key( + tokens, mock_new_device_key_generate, mock_token_generate +): + repo = JsonTokensRepository() + + with pytest.raises(NewDeviceKeyNotFound): + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park", + ) + is None + ) + + +def test_use_mnemonic_new_device_key( + tokens, mock_new_device_key_generate, mock_token_generate +): + repo = JsonTokensRepository() + + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + ) + is not None + ) + # assert read_json(datadir / "tokens.json")["new_device"] == [] + + +def test_use_mnemonic_new_device_key_when_empty(empty_keys): + repo = JsonTokensRepository() + + with pytest.raises(NewDeviceKeyNotFound): + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + ) + is None + ) + + +def test_use_mnemonic_new_device_key_when_null(null_keys): + repo = JsonTokensRepository() + + with pytest.raises(NewDeviceKeyNotFound): + assert ( + repo.use_mnemonic_new_device_key( + device_name="imnew", + mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", + ) + is None + ) diff --git a/tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json b/tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json new file mode 100644 index 0000000..2131ddf --- /dev/null +++ b/tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json @@ -0,0 +1,9 @@ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + } + ] +} diff --git a/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json b/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json new file mode 100644 index 0000000..45e6f90 --- /dev/null +++ b/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json @@ -0,0 +1,26 @@ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + }, + { + "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", + "name": "second_token", + "date": "2022-07-15 17:41:31.675698Z" + }, + { + "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", + "name": "third_token", + "date": "2022-07-15T17:41:31.675698Z" + }, + { + "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", + "name": "forth_token", + "date": "2022-07-15T17:41:31.675698" + } + ], + "recovery_token": null, + "new_device": null +} -- 2.42.0