diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository.py b/tests/test_graphql/test_repository/test_json_tokens_repository.py
index af8c844..23df9df 100644
--- a/tests/test_graphql/test_repository/test_json_tokens_repository.py
+++ b/tests/test_graphql/test_repository/test_json_tokens_repository.py
@@ -25,7 +25,6 @@ from test_tokens_repository import (
     mock_recovery_key_generate,
     mock_generate_token,
     mock_new_device_key_generate,
-    empty_keys,
 )
 
 ORIGINAL_TOKEN_CONTENT = [
@@ -51,6 +50,18 @@ ORIGINAL_TOKEN_CONTENT = [
     },
 ]
 
+EMPTY_KEYS_JSON = """
+{
+    "tokens": [
+        {
+            "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
+            "name": "primary_token",
+            "date": "2022-07-15 17:41:31.675698"
+        }
+    ]
+}
+"""
+
 
 @pytest.fixture
 def tokens(mocker, datadir):
@@ -59,6 +70,22 @@ def tokens(mocker, datadir):
     return datadir
 
 
+@pytest.fixture
+def empty_keys(mocker, tmpdir):
+    tokens_file = tmpdir / "empty_keys.json"
+    with open(tokens_file, "w") as file:
+        file.write(EMPTY_KEYS_JSON)
+    mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokens_file)
+    assert read_json(tokens_file)["tokens"] == [
+        {
+            "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
+            "name": "primary_token",
+            "date": "2022-07-15 17:41:31.675698",
+        }
+    ]
+    return tmpdir
+
+
 @pytest.fixture
 def null_keys(mocker, datadir):
     mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json")
diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py
index ee1b9e0..b172f13 100644
--- a/tests/test_graphql/test_repository/test_tokens_repository.py
+++ b/tests/test_graphql/test_repository/test_tokens_repository.py
@@ -32,17 +32,6 @@ ORIGINAL_DEVICE_NAMES = [
     "forth_token",
 ]
 
-EMPTY_KEYS_JSON = """
-{
-    "tokens": [
-        {
-            "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
-            "name": "primary_token",
-            "date": "2022-07-15 17:41:31.675698"
-        }
-    ]
-}
-"""
 
 EMPTY_TOKENS_JSON = ' {"tokens": []}'
 
@@ -51,22 +40,6 @@ def mnemonic_from_hex(hexkey):
     return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey))
 
 
-@pytest.fixture
-def empty_keys(mocker, tmpdir):
-    tokens_file = tmpdir / "empty_keys.json"
-    with open(tokens_file, "w") as file:
-        file.write(EMPTY_KEYS_JSON)
-    mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokens_file)
-    assert read_json(tokens_file)["tokens"] == [
-        {
-            "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
-            "name": "primary_token",
-            "date": "2022-07-15 17:41:31.675698",
-        }
-    ]
-    return tmpdir
-
-
 @pytest.fixture
 def empty_tokens(mocker, tmpdir):
     tokens_file = tmpdir / "empty_tokens.json"