test(tokens-repo): make shared test token state use token repo api for loading

redis/connection-pool
Houkime 2023-01-02 17:22:18 +00:00 committed by Inex Code
parent 8f645113e2
commit 824b018487
3 changed files with 64 additions and 37 deletions
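In short: the shared test fixtures no longer copy a prebaked tokens.json into the mocked location; they seed token state through the tokens repository API, so the same fixtures can back any repository implementation (JSON here, Redis elsewhere on this branch). A condensed sketch of the seeding pattern, using names from the diff below (`seed_test_token` is a hypothetical helper, not part of this commit):

    from datetime import datetime

    from selfprivacy_api.models.tokens.token import Token
    from selfprivacy_api.repositories.tokens.json_tokens_repository import (
        JsonTokensRepository,
    )

    def seed_test_token(repo: JsonTokensRepository) -> None:
        # Hypothetical helper: write a token through the repo API
        # instead of copying a JSON fixture file into place.
        repo._store_token(
            Token(
                token="TEST_TOKEN",
                device_name="test_token",
                created_at=datetime(2022, 1, 14, 8, 31, 10, 789314),
            )
        )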


@@ -4,8 +4,34 @@
import os
import pytest
from fastapi.testclient import TestClient
from shutil import copy
import os.path as path
import datetime
# from selfprivacy_api.actions.api_tokens import TOKEN_REPO
from selfprivacy_api.models.tokens.token import Token
from selfprivacy_api.repositories.tokens.json_tokens_repository import (
    JsonTokensRepository,
)
from tests.common import read_json

EMPTY_TOKENS_JSON = ' {"tokens": []}'

TOKENS_FILE_CONTENTS = {
    "tokens": [
        {
            "token": "TEST_TOKEN",
            "name": "test_token",
            "date": datetime.datetime(2022, 1, 14, 8, 31, 10, 789314),
        },
        {
            "token": "TEST_TOKEN2",
            "name": "test_token2",
            "date": datetime.datetime(2022, 1, 14, 8, 31, 10, 789314),
        },
    ]
}
def pytest_generate_tests(metafunc):
@@ -17,13 +43,40 @@ def global_data_dir():
@pytest.fixture
def tokens_file(mocker, tmpdir):
    """Mock tokens file."""
    tmp_file = tmpdir / "tokens.json"
    source_file = path.join(global_data_dir(), "tokens.json")
    copy(source_file, tmp_file)
    mock = mocker.patch("selfprivacy_api.utils.TOKENS_FILE", tmp_file)
    return mock
def empty_tokens(mocker, tmpdir):
    tokenfile = tmpdir / "empty_tokens.json"
    with open(tokenfile, "w") as file:
        file.write(EMPTY_TOKENS_JSON)
    mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokenfile)
    assert read_json(tokenfile)["tokens"] == []
    return tmpdir

@pytest.fixture
def empty_json_repo(empty_tokens):
    repo = JsonTokensRepository()
    for token in repo.get_tokens():
        repo.delete_token(token)
    assert repo.get_tokens() == []
    return repo

@pytest.fixture
def tokens_file(empty_json_repo, tmpdir):
    """A state with tokens"""
    for token in TOKENS_FILE_CONTENTS["tokens"]:
        empty_json_repo._store_token(
            Token(
                token=token["token"],
                device_name=token["name"],
                created_at=token["date"],
            )
        )
    # temporary return for compatibility with older tests
    tokenfile = tmpdir / "empty_tokens.json"
    assert path.exists(tokenfile)
    return tokenfile
@pytest.fixture
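A hypothetical consumer of the fixtures above (a sketch, assuming they live in conftest.py and are collected by pytest; `test_tokens_are_loaded` is not part of this commit):

    from selfprivacy_api.repositories.tokens.json_tokens_repository import (
        JsonTokensRepository,
    )

    def test_tokens_are_loaded(tokens_file):
        # Requesting `tokens_file` transitively runs empty_tokens, empty_json_repo,
        # and the seeding loop above before the test body executes.
        repo = JsonTokensRepository()
        names = {token.device_name for token in repo.get_tokens()}
        assert names == {"test_token", "test_token2"}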


@@ -17,12 +17,12 @@ TOKENS_FILE_CONTETS = {
        {
            "token": "TEST_TOKEN",
            "name": "test_token",
            "date": "2022-01-14 08:31:10.789314",
            "date": "2022-01-14T08:31:10.789314",
        },
        {
            "token": "TEST_TOKEN2",
            "name": "test_token2",
            "date": "2022-01-14 08:31:10.789314",
            "date": "2022-01-14T08:31:10.789314",
        },
    ]
}
@@ -118,7 +118,7 @@ def test_graphql_delete_token(authorized_client, tokens_file):
            {
                "token": "TEST_TOKEN",
                "name": "test_token",
                "date": "2022-01-14 08:31:10.789314",
                "date": "2022-01-14T08:31:10.789314",
            }
        ]
    }
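The expected dates gain a `T` separator, consistent with values now round-tripping through `datetime` objects: `datetime.isoformat()` uses `T` by default, while `str()` uses a space. The standard library behavior, for reference:

    import datetime

    d = datetime.datetime(2022, 1, 14, 8, 31, 10, 789314)
    print(d.isoformat())  # 2022-01-14T08:31:10.789314  (new expected value)
    print(str(d))         # 2022-01-14 08:31:10.789314  (old, space-separated form)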


@@ -16,13 +16,9 @@ from selfprivacy_api.repositories.tokens.exceptions import (
    TokenNotFound,
    NewDeviceKeyNotFound,
)
from selfprivacy_api.repositories.tokens.json_tokens_repository import (
    JsonTokensRepository,
)
from selfprivacy_api.repositories.tokens.redis_tokens_repository import (
    RedisTokensRepository,
)
from tests.common import read_json
ORIGINAL_DEVICE_NAMES = [
@@ -33,23 +29,10 @@ ORIGINAL_DEVICE_NAMES = [
]

EMPTY_TOKENS_JSON = ' {"tokens": []}'

def mnemonic_from_hex(hexkey):
    return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey))

@pytest.fixture
def empty_tokens(mocker, tmpdir):
    tokens_file = tmpdir / "empty_tokens.json"
    with open(tokens_file, "w") as file:
        file.write(EMPTY_TOKENS_JSON)
    mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokens_file)
    assert read_json(tokens_file)["tokens"] == []
    return tmpdir

@pytest.fixture
def mock_new_device_key_generate(mocker):
    mock = mocker.patch(
@@ -137,15 +120,6 @@ def mock_recovery_key_generate(mocker):
    return mock

@pytest.fixture
def empty_json_repo(empty_tokens):
    repo = JsonTokensRepository()
    for token in repo.get_tokens():
        repo.delete_token(token)
    assert repo.get_tokens() == []
    return repo

@pytest.fixture
def empty_redis_repo():
    repo = RedisTokensRepository()
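With `empty_tokens` and `empty_json_repo` relocated to conftest.py, this module keeps only the Redis-specific fixture. One plausible way to run the same tests against both backends (a sketch, not part of this commit; `empty_repo` and `test_repo_starts_empty` are hypothetical, though `request.getfixturevalue` is standard pytest API):

    import pytest

    @pytest.fixture(params=["empty_json_repo", "empty_redis_repo"])
    def empty_repo(request):
        # Resolve one backend fixture by name for each parametrized run.
        return request.getfixturevalue(request.param)

    def test_repo_starts_empty(empty_repo):
        assert empty_repo.get_tokens() == []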