test(tokens-repo): offload empty_keys fixture to json tests

This commit is contained in:
Houkime 2022-12-28 11:14:36 +00:00
parent 1768fe278a
commit 40d331d01f
2 changed files with 28 additions and 28 deletions

View file

@@ -25,7 +25,6 @@ from test_tokens_repository (
mock_recovery_key_generate, mock_recovery_key_generate,
mock_generate_token, mock_generate_token,
mock_new_device_key_generate, mock_new_device_key_generate,
empty_keys,
) )
ORIGINAL_TOKEN_CONTENT = [ ORIGINAL_TOKEN_CONTENT = [
@@ -51,6 +50,18 @@ ORIGINAL_TOKEN_CONTENT = [
}, },
] ]
EMPTY_KEYS_JSON = """
{
"tokens": [
{
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
"name": "primary_token",
"date": "2022-07-15 17:41:31.675698"
}
]
}
"""
@pytest.fixture @pytest.fixture
def tokens(mocker, datadir): def tokens(mocker, datadir):
@@ -59,6 +70,22 @@ def tokens(mocker, datadir):
return datadir return datadir
@pytest.fixture
def empty_keys(mocker, tmpdir):
tokens_file = tmpdir / "empty_keys.json"
with open(tokens_file, "w") as file:
file.write(EMPTY_KEYS_JSON)
mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokens_file)
assert read_json(tokens_file)["tokens"] == [
{
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
"name": "primary_token",
"date": "2022-07-15 17:41:31.675698",
}
]
return tmpdir
@pytest.fixture @pytest.fixture
def null_keys(mocker, datadir): def null_keys(mocker, datadir):
mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json") mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json")

View file

@@ -32,17 +32,6 @@ ORIGINAL_DEVICE_NAMES = [
"forth_token", "forth_token",
] ]
EMPTY_KEYS_JSON = """
{
"tokens": [
{
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
"name": "primary_token",
"date": "2022-07-15 17:41:31.675698"
}
]
}
"""
EMPTY_TOKENS_JSON = ' {"tokens": []}' EMPTY_TOKENS_JSON = ' {"tokens": []}'
@@ -51,22 +40,6 @@ def mnemonic_from_hex(hexkey):
return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey)) return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey))
@pytest.fixture
def empty_keys(mocker, tmpdir):
tokens_file = tmpdir / "empty_keys.json"
with open(tokens_file, "w") as file:
file.write(EMPTY_KEYS_JSON)
mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokens_file)
assert read_json(tokens_file)["tokens"] == [
{
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
"name": "primary_token",
"date": "2022-07-15 17:41:31.675698",
}
]
return tmpdir
@pytest.fixture @pytest.fixture
def empty_tokens(mocker, tmpdir): def empty_tokens(mocker, tmpdir):
tokens_file = tmpdir / "empty_tokens.json" tokens_file = tmpdir / "empty_tokens.json"