From 3e7ea01a421ff62baba855b77f95ae974541a8d7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 2022 11:00:55 +0000 Subject: [PATCH 001/246] test(tokens-repo): make empty_keys fixture (and derived) shareable --- .../empty_keys.json | 9 -------- .../test_repository/test_tokens_repository.py | 23 +++++++++++++++---- .../test_tokens_repository/empty_keys.json | 9 -------- 3 files changed, 19 insertions(+), 22 deletions(-) delete mode 100644 tests/test_graphql/test_repository/test_json_tokens_repository/empty_keys.json delete mode 100644 tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository/empty_keys.json b/tests/test_graphql/test_repository/test_json_tokens_repository/empty_keys.json deleted file mode 100644 index 2131ddf..0000000 --- a/tests/test_graphql/test_repository/test_json_tokens_repository/empty_keys.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - } - ] -} diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 020a868..8b8b089 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -32,22 +32,37 @@ ORIGINAL_DEVICE_NAMES = [ "forth_token", ] +EMPTY_KEYS_JSON = """ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + } + ] +} +""" + def mnemonic_from_hex(hexkey): return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey)) @pytest.fixture -def empty_keys(mocker, datadir): - mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "empty_keys.json") - assert read_json(datadir / "empty_keys.json")["tokens"] == [ +def empty_keys(mocker, tmpdir): 
+ tokens_file = tmpdir / "empty_keys.json" + with open(tokens_file, "w") as file: + file.write(EMPTY_KEYS_JSON) + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokens_file) + assert read_json(tokens_file)["tokens"] == [ { "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", "name": "primary_token", "date": "2022-07-15 17:41:31.675698", } ] - return datadir + return tmpdir @pytest.fixture diff --git a/tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json b/tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json deleted file mode 100644 index 2131ddf..0000000 --- a/tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - } - ] -} From 8065921862fa986a5aec387f76bea950b851e7c2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 2022 11:09:24 +0000 Subject: [PATCH 002/246] test(tokens-repo): make empty_tokens fixture, even more minimal --- .../test_repository/test_tokens_repository.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 8b8b089..ee1b9e0 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -44,6 +44,8 @@ EMPTY_KEYS_JSON = """ } """ +EMPTY_TOKENS_JSON = ' {"tokens": []}' + def mnemonic_from_hex(hexkey): return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey)) @@ -65,6 +67,16 @@ def empty_keys(mocker, tmpdir): return tmpdir +@pytest.fixture +def empty_tokens(mocker, tmpdir): + tokens_file = tmpdir / "empty_tokens.json" + with open(tokens_file, "w") as file: + file.write(EMPTY_TOKENS_JSON) + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", 
new=tokens_file) + assert read_json(tokens_file)["tokens"] == [] + return tmpdir + + @pytest.fixture def mock_new_device_key_generate(mocker): mock = mocker.patch( @@ -153,7 +165,7 @@ def mock_recovery_key_generate(mocker): @pytest.fixture -def empty_json_repo(empty_keys): +def empty_json_repo(empty_tokens): repo = JsonTokensRepository() for token in repo.get_tokens(): repo.delete_token(token) From 889c7eee6a29daf5a230fd5abaa8ce9a100386b7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 2022 11:14:36 +0000 Subject: [PATCH 003/246] test(tokens-repo): offload empty_keys fixture to json tests --- .../test_json_tokens_repository.py | 29 ++++++++++++++++++- .../test_repository/test_tokens_repository.py | 27 ----------------- 2 files changed, 28 insertions(+), 28 deletions(-) diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository.py b/tests/test_graphql/test_repository/test_json_tokens_repository.py index af8c844..23df9df 100644 --- a/tests/test_graphql/test_repository/test_json_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_json_tokens_repository.py @@ -25,7 +25,6 @@ from test_tokens_repository import ( mock_recovery_key_generate, mock_generate_token, mock_new_device_key_generate, - empty_keys, ) ORIGINAL_TOKEN_CONTENT = [ @@ -51,6 +50,18 @@ ORIGINAL_TOKEN_CONTENT = [ }, ] +EMPTY_KEYS_JSON = """ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + } + ] +} +""" + @pytest.fixture def tokens(mocker, datadir): @@ -59,6 +70,22 @@ def tokens(mocker, datadir): return datadir +@pytest.fixture +def empty_keys(mocker, tmpdir): + tokens_file = tmpdir / "empty_keys.json" + with open(tokens_file, "w") as file: + file.write(EMPTY_KEYS_JSON) + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokens_file) + assert read_json(tokens_file)["tokens"] == [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": 
"primary_token", + "date": "2022-07-15 17:41:31.675698", + } + ] + return tmpdir + + @pytest.fixture def null_keys(mocker, datadir): mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json") diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index ee1b9e0..b172f13 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -32,17 +32,6 @@ ORIGINAL_DEVICE_NAMES = [ "forth_token", ] -EMPTY_KEYS_JSON = """ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - } - ] -} -""" EMPTY_TOKENS_JSON = ' {"tokens": []}' @@ -51,22 +40,6 @@ def mnemonic_from_hex(hexkey): return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey)) -@pytest.fixture -def empty_keys(mocker, tmpdir): - tokens_file = tmpdir / "empty_keys.json" - with open(tokens_file, "w") as file: - file.write(EMPTY_KEYS_JSON) - mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokens_file) - assert read_json(tokens_file)["tokens"] == [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698", - } - ] - return tmpdir - - @pytest.fixture def empty_tokens(mocker, tmpdir): tokens_file = tmpdir / "empty_tokens.json" From e125f3a4b18f28e20bc292cd53779fea20decaec Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 2022 12:44:51 +0000 Subject: [PATCH 004/246] test(tokens-repo): remove test tokens.json files except for one which will temporarily remain gitkeeps are to prevent shared_datadir from erroring out in a freshly cloned repo. 
for now huey database and jobs fixtures use shared_datadir --- tests/conftest.py | 15 +++++++++++---- tests/test_graphql/data/gitkeep | 0 tests/test_graphql/data/tokens.json | 14 -------------- tests/test_rest_endpoints/data/tokens.json | 14 -------------- .../test_rest_endpoints/services/data/tokens.json | 9 --------- tests/test_rest_endpoints/services/gitkeep | 0 6 files changed, 11 insertions(+), 41 deletions(-) create mode 100644 tests/test_graphql/data/gitkeep delete mode 100644 tests/test_graphql/data/tokens.json delete mode 100644 tests/test_rest_endpoints/data/tokens.json delete mode 100644 tests/test_rest_endpoints/services/data/tokens.json create mode 100644 tests/test_rest_endpoints/services/gitkeep diff --git a/tests/conftest.py b/tests/conftest.py index ea7a66a..4b65d20 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,18 +4,25 @@ import os import pytest from fastapi.testclient import TestClient +from shutil import copy +import os.path as path def pytest_generate_tests(metafunc): os.environ["TEST_MODE"] = "true" +def global_data_dir(): + return path.join(path.dirname(__file__), "data") + + @pytest.fixture -def tokens_file(mocker, shared_datadir): +def tokens_file(mocker, tmpdir): """Mock tokens file.""" - mock = mocker.patch( - "selfprivacy_api.utils.TOKENS_FILE", shared_datadir / "tokens.json" - ) + tmp_file = tmpdir / "tokens.json" + source_file = path.join(global_data_dir(), "tokens.json") + copy(source_file, tmp_file) + mock = mocker.patch("selfprivacy_api.utils.TOKENS_FILE", tmp_file) return mock diff --git a/tests/test_graphql/data/gitkeep b/tests/test_graphql/data/gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_graphql/data/tokens.json b/tests/test_graphql/data/tokens.json deleted file mode 100644 index 9be9d02..0000000 --- a/tests/test_graphql/data/tokens.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314" - }, - { 
- "token": "TEST_TOKEN2", - "name": "test_token2", - "date": "2022-01-14 08:31:10.789314" - } - ] -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/data/tokens.json b/tests/test_rest_endpoints/data/tokens.json deleted file mode 100644 index 9be9d02..0000000 --- a/tests/test_rest_endpoints/data/tokens.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314" - }, - { - "token": "TEST_TOKEN2", - "name": "test_token2", - "date": "2022-01-14 08:31:10.789314" - } - ] -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/data/tokens.json b/tests/test_rest_endpoints/services/data/tokens.json deleted file mode 100644 index 9d35420..0000000 --- a/tests/test_rest_endpoints/services/data/tokens.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "Test Token", - "date": "2022-01-14 08:31:10.789314" - } - ] -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/gitkeep b/tests/test_rest_endpoints/services/gitkeep new file mode 100644 index 0000000..e69de29 From f542c1e6c78f63de07cfcc0c6d9fb4976d27644c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 2022 14:51:25 +0000 Subject: [PATCH 005/246] test(tokens-repo): break out assert_original() in rest --- tests/test_rest_endpoints/test_auth.py | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 12de0cf..bb322e9 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -37,6 +37,10 @@ DATE_FORMATS = [ ] +def assert_original(filename): + assert read_json(filename) == TOKENS_FILE_CONTETS + + def test_get_tokens_info(authorized_client, tokens_file): response = authorized_client.get("/auth/tokens") assert response.status_code == 200 @@ -58,7 +62,7 @@ def 
test_get_tokens_unauthorized(client, tokens_file): def test_delete_token_unauthorized(client, tokens_file): response = client.delete("/auth/tokens") assert response.status_code == 401 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(tokens_file) def test_delete_token(authorized_client, tokens_file): @@ -82,7 +86,7 @@ def test_delete_self_token(authorized_client, tokens_file): "/auth/tokens", json={"token_name": "test_token"} ) assert response.status_code == 400 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(tokens_file) def test_delete_nonexistent_token(authorized_client, tokens_file): @@ -90,13 +94,13 @@ def test_delete_nonexistent_token(authorized_client, tokens_file): "/auth/tokens", json={"token_name": "test_token3"} ) assert response.status_code == 404 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(tokens_file) def test_refresh_token_unauthorized(client, tokens_file): response = client.post("/auth/tokens") assert response.status_code == 401 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(tokens_file) def test_refresh_token(authorized_client, tokens_file): @@ -112,7 +116,7 @@ def test_refresh_token(authorized_client, tokens_file): def test_get_new_device_auth_token_unauthorized(client, tokens_file): response = client.post("/auth/new_device") assert response.status_code == 401 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(tokens_file) def test_get_new_device_auth_token(authorized_client, tokens_file): @@ -133,13 +137,13 @@ def test_get_and_delete_new_device_token(authorized_client, tokens_file): "/auth/new_device", json={"token": response.json()["token"]} ) assert response.status_code == 200 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(tokens_file) def test_delete_token_unauthenticated(client, tokens_file): response = client.delete("/auth/new_device") assert response.status_code == 401 - assert 
read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(tokens_file) def test_get_and_authorize_new_device(client, authorized_client, tokens_file): @@ -163,7 +167,7 @@ def test_authorize_new_device_with_invalid_token(client, tokens_file): json={"token": "invalid_token", "device": "new_device"}, ) assert response.status_code == 404 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(tokens_file) def test_get_and_authorize_used_token(client, authorized_client, tokens_file): @@ -214,7 +218,7 @@ def test_authorize_without_token(client, tokens_file): json={"device": "new_device"}, ) assert response.status_code == 422 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(tokens_file) # Recovery tokens @@ -243,7 +247,7 @@ def test_authorize_without_token(client, tokens_file): def test_get_recovery_token_status_unauthorized(client, tokens_file): response = client.get("/auth/recovery_token") assert response.status_code == 401 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(tokens_file) def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): @@ -256,7 +260,7 @@ def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): "expiration": None, "uses_left": None, } - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(tokens_file) def test_generate_recovery_token(authorized_client, client, tokens_file): From 7e0e6015cf2412cf9017f2acd6bc6dc2c2181add Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 2022 15:16:58 +0000 Subject: [PATCH 006/246] test(tokens-repo): break out rest_get_token_info() --- tests/test_rest_endpoints/test_auth.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index bb322e9..bd4efae 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -41,10 +41,14 @@ def 
assert_original(filename): assert read_json(filename) == TOKENS_FILE_CONTETS -def test_get_tokens_info(authorized_client, tokens_file): - response = authorized_client.get("/auth/tokens") +def rest_get_tokens_info(client): + response = client.get("/auth/tokens") assert response.status_code == 200 - assert response.json() == [ + return response.json() + + +def test_get_tokens_info(authorized_client, tokens_file): + assert rest_get_tokens_info(authorized_client) == [ {"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True}, { "name": "test_token2", From 270e569af22e07512e7177f59e26fae0ae479d1d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 2022 15:26:42 +0000 Subject: [PATCH 007/246] test(tokens-repo): use rest token info in token deletion test --- tests/test_rest_endpoints/test_auth.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index bd4efae..6199265 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -74,15 +74,9 @@ def test_delete_token(authorized_client, tokens_file): "/auth/tokens", json={"token_name": "test_token2"} ) assert response.status_code == 200 - assert read_json(tokens_file) == { - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314", - } - ] - } + assert rest_get_tokens_info(authorized_client) == [ + {"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True} + ] def test_delete_self_token(authorized_client, tokens_file): From 07fe2f8a558b6ada7896d157844e4f5273f40fcb Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 2022 15:43:43 +0000 Subject: [PATCH 008/246] test(tokens-repo): check refreshed token validity by trying to auth --- tests/test_rest_endpoints/test_auth.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git 
a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 6199265..44b543d 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -5,11 +5,6 @@ import datetime import pytest from mnemonic import Mnemonic -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) - -TOKEN_REPO = JsonTokensRepository() from tests.common import read_json, write_json @@ -105,7 +100,8 @@ def test_refresh_token(authorized_client, tokens_file): response = authorized_client.post("/auth/tokens") assert response.status_code == 200 new_token = response.json()["token"] - assert TOKEN_REPO.get_token_by_token_string(new_token) is not None + authorized_client.headers.update({"Authorization": "Bearer " + new_token}) + assert rest_get_tokens_info(authorized_client) is not None # new device From 1d6275b75bbfc88d5a08a9136986db9ab939f803 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 2022 15:48:40 +0000 Subject: [PATCH 009/246] test(tokens-repo): delete standalone get new device test At rest api level, we can only check the existence of new device token by using it, and this test already exists. 
--- tests/test_rest_endpoints/test_auth.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 44b543d..3d6b256 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -113,14 +113,6 @@ def test_get_new_device_auth_token_unauthorized(client, tokens_file): assert_original(tokens_file) -def test_get_new_device_auth_token(authorized_client, tokens_file): - response = authorized_client.post("/auth/new_device") - assert response.status_code == 200 - assert "token" in response.json() - token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() - assert read_json(tokens_file)["new_device"]["token"] == token - - def test_get_and_delete_new_device_token(authorized_client, tokens_file): response = authorized_client.post("/auth/new_device") assert response.status_code == 200 From 179078aed2060f36dce6770fe2bfd1d74a899bd8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 2022 16:17:06 +0000 Subject: [PATCH 010/246] test(tokens-repo): break out getting new device token --- tests/test_rest_endpoints/test_auth.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 3d6b256..b8e1292 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -132,15 +132,20 @@ def test_delete_token_unauthenticated(client, tokens_file): assert_original(tokens_file) -def test_get_and_authorize_new_device(client, authorized_client, tokens_file): - response = authorized_client.post("/auth/new_device") +def rest_get_new_device_token(client): + response = client.post("/auth/new_device") assert response.status_code == 200 assert "token" in response.json() - token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() - assert 
read_json(tokens_file)["new_device"]["token"] == token + return response.json()["token"] + + +def test_get_and_authorize_new_device(client, authorized_client, tokens_file): response = client.post( "/auth/new_device/authorize", - json={"token": response.json()["token"], "device": "new_device"}, + json={ + "token": rest_get_new_device_token(authorized_client), + "device": "new_device", + }, ) assert response.status_code == 200 assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] @@ -157,21 +162,17 @@ def test_authorize_new_device_with_invalid_token(client, tokens_file): def test_get_and_authorize_used_token(client, authorized_client, tokens_file): - response = authorized_client.post("/auth/new_device") - assert response.status_code == 200 - assert "token" in response.json() - token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() - assert read_json(tokens_file)["new_device"]["token"] == token + token_to_be_used_2_times = rest_get_new_device_token(authorized_client) response = client.post( "/auth/new_device/authorize", - json={"token": response.json()["token"], "device": "new_device"}, + json={"token": token_to_be_used_2_times, "device": "new_device"}, ) assert response.status_code == 200 assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" response = client.post( "/auth/new_device/authorize", - json={"token": response.json()["token"], "device": "new_device"}, + json={"token": token_to_be_used_2_times, "device": "new_device"}, ) assert response.status_code == 404 From bfcec3d51de58746ab765595d26d3eb6795119f3 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 2022 16:27:08 +0000 Subject: [PATCH 011/246] test(tokens-repo): break out checking token validity --- tests/test_rest_endpoints/test_auth.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py 
b/tests/test_rest_endpoints/test_auth.py index b8e1292..9467f49 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -36,6 +36,11 @@ def assert_original(filename): assert read_json(filename) == TOKENS_FILE_CONTETS +def assert_token_valid(client, token): + client.headers.update({"Authorization": "Bearer " + token}) + assert rest_get_tokens_info(client) is not None + + def rest_get_tokens_info(client): response = client.get("/auth/tokens") assert response.status_code == 200 @@ -100,8 +105,7 @@ def test_refresh_token(authorized_client, tokens_file): response = authorized_client.post("/auth/tokens") assert response.status_code == 200 new_token = response.json()["token"] - authorized_client.headers.update({"Authorization": "Bearer " + new_token}) - assert rest_get_tokens_info(authorized_client) is not None + assert_token_valid(authorized_client, new_token) # new device @@ -148,8 +152,7 @@ def test_get_and_authorize_new_device(client, authorized_client, tokens_file): }, ) assert response.status_code == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] - assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" + assert_token_valid(authorized_client, response.json()["token"]) def test_authorize_new_device_with_invalid_token(client, tokens_file): @@ -168,8 +171,7 @@ def test_get_and_authorize_used_token(client, authorized_client, tokens_file): json={"token": token_to_be_used_2_times, "device": "new_device"}, ) assert response.status_code == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] - assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" + assert_token_valid(authorized_client, response.json()["token"]) response = client.post( "/auth/new_device/authorize", json={"token": token_to_be_used_2_times, "device": "new_device"}, From 458c4fd28aeeb116b73fe85885be4eb0e26ca2b0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 
2022 16:37:34 +0000 Subject: [PATCH 012/246] test(tokens-repo): make new device tests a bit more readable --- tests/test_rest_endpoints/test_auth.py | 22 +++++++--------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 9467f49..93be5ee 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -118,14 +118,8 @@ def test_get_new_device_auth_token_unauthorized(client, tokens_file): def test_get_and_delete_new_device_token(authorized_client, tokens_file): - response = authorized_client.post("/auth/new_device") - assert response.status_code == 200 - assert "token" in response.json() - token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() - assert read_json(tokens_file)["new_device"]["token"] == token - response = authorized_client.delete( - "/auth/new_device", json={"token": response.json()["token"]} - ) + token = rest_get_new_device_token(authorized_client) + response = authorized_client.delete("/auth/new_device", json={"token": token}) assert response.status_code == 200 assert_original(tokens_file) @@ -144,10 +138,11 @@ def rest_get_new_device_token(client): def test_get_and_authorize_new_device(client, authorized_client, tokens_file): + token = rest_get_new_device_token(authorized_client) response = client.post( "/auth/new_device/authorize", json={ - "token": rest_get_new_device_token(authorized_client), + "token": token, "device": "new_device", }, ) @@ -172,6 +167,7 @@ def test_get_and_authorize_used_token(client, authorized_client, tokens_file): ) assert response.status_code == 200 assert_token_valid(authorized_client, response.json()["token"]) + response = client.post( "/auth/new_device/authorize", json={"token": token_to_be_used_2_times, "device": "new_device"}, @@ -182,11 +178,7 @@ def test_get_and_authorize_used_token(client, authorized_client, tokens_file): def 
test_get_and_authorize_token_after_12_minutes( client, authorized_client, tokens_file ): - response = authorized_client.post("/auth/new_device") - assert response.status_code == 200 - assert "token" in response.json() - token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() - assert read_json(tokens_file)["new_device"]["token"] == token + token = rest_get_new_device_token(authorized_client) file_data = read_json(tokens_file) file_data["new_device"]["expiration"] = str( @@ -196,7 +188,7 @@ def test_get_and_authorize_token_after_12_minutes( response = client.post( "/auth/new_device/authorize", - json={"token": response.json()["token"], "device": "new_device"}, + json={"token": token, "device": "new_device"}, ) assert response.status_code == 404 From 0bf18603d4f3b49c2be6c5648717428d76532cab Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 28 Dec 2022 17:09:19 +0000 Subject: [PATCH 013/246] test(tokens-repo): travel in time to check expiration --- tests/test_rest_endpoints/test_auth.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 93be5ee..f428904 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -31,6 +31,9 @@ DATE_FORMATS = [ "%Y-%m-%d %H:%M:%S.%f", ] +# for expiration tests. 
If headache, consider freezegun +DEVICE_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.new_device_key.datetime" + def assert_original(filename): assert read_json(filename) == TOKENS_FILE_CONTETS @@ -176,15 +179,19 @@ def test_get_and_authorize_used_token(client, authorized_client, tokens_file): def test_get_and_authorize_token_after_12_minutes( - client, authorized_client, tokens_file + client, authorized_client, tokens_file, mocker ): token = rest_get_new_device_token(authorized_client) - file_data = read_json(tokens_file) - file_data["new_device"]["expiration"] = str( - datetime.datetime.now() - datetime.timedelta(minutes=13) - ) - write_json(tokens_file, file_data) + # TARDIS sounds + new_time = datetime.datetime.now() + datetime.timedelta(minutes=13) + + class warped_spacetime(datetime.datetime): + @classmethod + def now(cls): + return new_time + + mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, warped_spacetime) response = client.post( "/auth/new_device/authorize", From 74777c4343840c7421426d8a8c3631ce54de6ea2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 30 Dec 2022 10:13:31 +0000 Subject: [PATCH 014/246] test(tokens-repo): fix typo in contets --- tests/test_rest_endpoints/test_auth.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index f428904..80cc2eb 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -9,7 +9,7 @@ from mnemonic import Mnemonic from tests.common import read_json, write_json -TOKENS_FILE_CONTETS = { +TOKENS_FILE_CONTENTS = { "tokens": [ { "token": "TEST_TOKEN", @@ -36,7 +36,7 @@ DEVICE_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.new_device_key.d def assert_original(filename): - assert read_json(filename) == TOKENS_FILE_CONTETS + assert read_json(filename) == TOKENS_FILE_CONTENTS def assert_token_valid(client, token): From 
0239f3174eab652e1e7fe7f6f7d33ee9976c3a53 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 30 Dec 2022 10:27:51 +0000 Subject: [PATCH 015/246] test(tokens-repo): do not read json in generate recovery test --- tests/test_rest_endpoints/test_auth.py | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 80cc2eb..7d1c88f 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -257,21 +257,19 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): assert response.status_code == 200 assert "token" in response.json() mnemonic_token = response.json()["token"] - token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() - assert read_json(tokens_file)["recovery_token"]["token"] == token - time_generated = read_json(tokens_file)["recovery_token"]["date"] - assert time_generated is not None + # Try to get token status + response = authorized_client.get("/auth/recovery_token") + assert response.status_code == 200 + assert "date" in response.json() + time_generated = response.json()["date"] + # Assert that the token was generated near the current time assert ( datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) < datetime.datetime.now() ) - - # Try to get token status - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 assert response.json() == { "exists": True, "valid": True, @@ -287,8 +285,7 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): ) assert recovery_response.status_code == 200 new_token = recovery_response.json()["token"] - assert read_json(tokens_file)["tokens"][2]["token"] == new_token - assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" + assert_token_valid(authorized_client, new_token) # Try to use token again recovery_response = 
client.post( @@ -297,8 +294,7 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): ) assert recovery_response.status_code == 200 new_token = recovery_response.json()["token"] - assert read_json(tokens_file)["tokens"][3]["token"] == new_token - assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" + assert_token_valid(authorized_client, new_token) @pytest.mark.parametrize("timeformat", DATE_FORMATS) From 548f47963ad8416a9023733b72c48f667a3cf22d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 30 Dec 2022 10:52:00 +0000 Subject: [PATCH 016/246] test(tokens-repo): break out obtaining recovery tokens --- tests/test_rest_endpoints/test_auth.py | 36 ++++++++++++++------------ 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 7d1c88f..49a1f3b 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -251,12 +251,25 @@ def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): assert_original(tokens_file) -def test_generate_recovery_token(authorized_client, client, tokens_file): - # Generate token without expiration and uses_left - response = authorized_client.post("/auth/recovery_token") +def rest_make_recovery_token(client, expires_at=None, timeformat=None): + if expires_at is None: + response = client.post("/auth/recovery_token") + else: + assert timeformat is not None + expires_at_str = expires_at.strftime(timeformat) + response = client.post( + "/auth/recovery_token", + json={"expiration": expires_at_str}, + ) + assert response.status_code == 200 assert "token" in response.json() - mnemonic_token = response.json()["token"] + return response.json()["token"] + + +def test_generate_recovery_token(authorized_client, client, tokens_file): + # Generate token without expiration and uses_left + mnemonic_token = rest_make_recovery_token(authorized_client) # Try to get token 
status response = authorized_client.get("/auth/recovery_token") @@ -304,20 +317,9 @@ def test_generate_recovery_token_with_expiration_date( # Generate token with expiration date # Generate expiration date in the future expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5) - expiration_date_str = expiration_date.strftime(timeformat) - response = authorized_client.post( - "/auth/recovery_token", - json={"expiration": expiration_date_str}, + mnemonic_token = rest_make_recovery_token( + authorized_client, expires_at=expiration_date, timeformat=timeformat ) - assert response.status_code == 200 - assert "token" in response.json() - mnemonic_token = response.json()["token"] - token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() - assert read_json(tokens_file)["recovery_token"]["token"] == token - assert datetime.datetime.strptime( - read_json(tokens_file)["recovery_token"]["expiration"], "%Y-%m-%dT%H:%M:%S.%f" - ) == datetime.datetime.strptime(expiration_date_str, timeformat) - time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None # Assert that the token was generated near the current time From ac4d4e012767c1e535ac426c5dd24728ef5bc173 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 30 Dec 2022 11:14:34 +0000 Subject: [PATCH 017/246] test(tokens-repo): break out recovery time operations --- tests/test_rest_endpoints/test_auth.py | 47 ++++++++++++++------------ 1 file changed, 25 insertions(+), 22 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 49a1f3b..1a3093c 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -267,23 +267,34 @@ def rest_make_recovery_token(client, expires_at=None, timeformat=None): return response.json()["token"] -def test_generate_recovery_token(authorized_client, client, tokens_file): - # Generate token without expiration and uses_left - mnemonic_token = 
rest_make_recovery_token(authorized_client) - - # Try to get token status - response = authorized_client.get("/auth/recovery_token") +def rest_get_recovery_status(client): + response = client.get("/auth/recovery_token") assert response.status_code == 200 - assert "date" in response.json() - time_generated = response.json()["date"] + return response.json() - # Assert that the token was generated near the current time + +def rest_get_recovery_date(client): + status = rest_get_recovery_status(client) + assert "date" in status + return status["date"] + + +def assert_recovery_recent(time_generated): assert ( datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) < datetime.datetime.now() ) - assert response.json() == { + + +def test_generate_recovery_token(authorized_client, client, tokens_file): + # Generate token without expiration and uses_left + mnemonic_token = rest_make_recovery_token(authorized_client) + + time_generated = rest_get_recovery_date(authorized_client) + assert_recovery_recent(time_generated) + + assert rest_get_recovery_status(authorized_client) == { "exists": True, "valid": True, "date": time_generated, @@ -320,19 +331,11 @@ def test_generate_recovery_token_with_expiration_date( mnemonic_token = rest_make_recovery_token( authorized_client, expires_at=expiration_date, timeformat=timeformat ) - time_generated = read_json(tokens_file)["recovery_token"]["date"] - assert time_generated is not None - # Assert that the token was generated near the current time - assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - - datetime.timedelta(seconds=5) - < datetime.datetime.now() - ) - # Try to get token status - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 - assert response.json() == { + time_generated = rest_get_recovery_date(authorized_client) + assert_recovery_recent(time_generated) + + assert rest_get_recovery_status(authorized_client) == { 
"exists": True, "valid": True, "date": time_generated, From 203940096c262be37d7d5f439c49b5862a0bc70a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 30 Dec 2022 11:51:52 +0000 Subject: [PATCH 018/246] test(tokens-repo): break out recovery token use --- tests/test_rest_endpoints/test_auth.py | 31 ++++++++++++-------------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 1a3093c..c426d54 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -287,6 +287,17 @@ def assert_recovery_recent(time_generated): ) +def rest_recover_with_mnemonic(client, mnemonic_token, device_name): + recovery_response = client.post( + "/auth/recovery_token/use", + json={"token": mnemonic_token, "device": device_name}, + ) + assert recovery_response.status_code == 200 + new_token = recovery_response.json()["token"] + assert_token_valid(client, new_token) + return new_token + + def test_generate_recovery_token(authorized_client, client, tokens_file): # Generate token without expiration and uses_left mnemonic_token = rest_make_recovery_token(authorized_client) @@ -302,23 +313,9 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): "uses_left": None, } - # Try to use the token - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device"}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert_token_valid(authorized_client, new_token) - - # Try to use token again - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device2"}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert_token_valid(authorized_client, new_token) + rest_recover_with_mnemonic(client, mnemonic_token, "recover_device") + # 
And again + rest_recover_with_mnemonic(client, mnemonic_token, "recover_device2") @pytest.mark.parametrize("timeformat", DATE_FORMATS) From e0bd6efcb2b5a546225e6f6c5f3cbc61b9a4929a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 30 Dec 2022 12:01:04 +0000 Subject: [PATCH 019/246] test(tokens-repo): use new recovery functions --- tests/test_rest_endpoints/test_auth.py | 42 +++----------------------- 1 file changed, 5 insertions(+), 37 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index c426d54..bdfb579 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -340,25 +340,9 @@ def test_generate_recovery_token_with_expiration_date( "uses_left": None, } - # Try to use the token - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device"}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert read_json(tokens_file)["tokens"][2]["token"] == new_token - assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" - - # Try to use token again - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device2"}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert read_json(tokens_file)["tokens"][3]["token"] == new_token - assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" + rest_recover_with_mnemonic(client, mnemonic_token, "recover_device") + # And again + rest_recover_with_mnemonic(client, mnemonic_token, "recover_device2") # Try to use token after expiration date new_data = read_json(tokens_file) @@ -450,16 +434,7 @@ def test_generate_recovery_token_with_limited_uses( } # Try to use the token - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": 
"recovery_device"}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert read_json(tokens_file)["tokens"][2]["token"] == new_token - assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" - - assert read_json(tokens_file)["recovery_token"]["uses_left"] == 1 + rest_recover_with_mnemonic(client, mnemonic_token, "recover_device") # Get the status of the token response = authorized_client.get("/auth/recovery_token") @@ -473,14 +448,7 @@ def test_generate_recovery_token_with_limited_uses( } # Try to use token again - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device2"}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert read_json(tokens_file)["tokens"][3]["token"] == new_token - assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" + rest_recover_with_mnemonic(client, mnemonic_token, "recover_device2") # Get the status of the token response = authorized_client.get("/auth/recovery_token") From 3aa3d197e2af8180439c3adcea2e839b96f813f8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 30 Dec 2022 12:33:21 +0000 Subject: [PATCH 020/246] test(tokens-repo): use mock time for recovery tokens expiration test --- tests/test_rest_endpoints/test_auth.py | 44 ++++++++++---------------- 1 file changed, 16 insertions(+), 28 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index bdfb579..309cc6c 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -32,9 +32,16 @@ DATE_FORMATS = [ ] # for expiration tests. 
If headache, consider freezegun +RECOVERY_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.recovery_key.datetime" DEVICE_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.new_device_key.datetime" +class NearFuture(datetime.datetime): + @classmethod + def now(cls): + return datetime.datetime.now() + datetime.timedelta(minutes=13) + + def assert_original(filename): assert read_json(filename) == TOKENS_FILE_CONTENTS @@ -184,14 +191,7 @@ def test_get_and_authorize_token_after_12_minutes( token = rest_get_new_device_token(authorized_client) # TARDIS sounds - new_time = datetime.datetime.now() + datetime.timedelta(minutes=13) - - class warped_spacetime(datetime.datetime): - @classmethod - def now(cls): - return new_time - - mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, warped_spacetime) + mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, NearFuture) response = client.post( "/auth/new_device/authorize", @@ -320,7 +320,7 @@ def test_generate_recovery_token(authorized_client, client, tokens_file): @pytest.mark.parametrize("timeformat", DATE_FORMATS) def test_generate_recovery_token_with_expiration_date( - authorized_client, client, tokens_file, timeformat + authorized_client, client, tokens_file, timeformat, mocker ): # Generate token with expiration date # Generate expiration date in the future @@ -345,29 +345,17 @@ def test_generate_recovery_token_with_expiration_date( rest_recover_with_mnemonic(client, mnemonic_token, "recover_device2") # Try to use token after expiration date - new_data = read_json(tokens_file) - new_data["recovery_token"]["expiration"] = datetime.datetime.now().strftime( - "%Y-%m-%dT%H:%M:%S.%f" - ) - write_json(tokens_file, new_data) + mock = mocker.patch(RECOVERY_KEY_VALIDATION_DATETIME, NearFuture) + device_name = "recovery_device3" recovery_response = client.post( "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device3"}, + json={"token": mnemonic_token, "device": device_name}, ) assert 
recovery_response.status_code == 404 - # Assert that the token was not created in JSON - assert read_json(tokens_file)["tokens"] == new_data["tokens"] - - # Get the status of the token - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 - assert response.json() == { - "exists": True, - "valid": False, - "date": time_generated, - "expiration": new_data["recovery_token"]["expiration"], - "uses_left": None, - } + # Assert that the token was not created + assert device_name not in [ + token["name"] for token in rest_get_tokens_info(authorized_client) + ] @pytest.mark.parametrize("timeformat", DATE_FORMATS) From 42fa5fe524c174abd29223ceabb8e38bd4df377b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 30 Dec 2022 12:52:12 +0000 Subject: [PATCH 021/246] test(tokens-repo): allow ading uses in a helper recovery function --- tests/test_rest_endpoints/test_auth.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 309cc6c..33eb76a 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -251,15 +251,23 @@ def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): assert_original(tokens_file) -def rest_make_recovery_token(client, expires_at=None, timeformat=None): - if expires_at is None: - response = client.post("/auth/recovery_token") - else: +def rest_make_recovery_token(client, expires_at=None, timeformat=None, uses=None): + json = {} + + if expires_at is not None: assert timeformat is not None expires_at_str = expires_at.strftime(timeformat) + json["expiration"] = expires_at_str + + if uses is not None: + json["uses"] = uses + + if json == {}: + response = client.post("/auth/recovery_token") + else: response = client.post( "/auth/recovery_token", - json={"expiration": expires_at_str}, + json=json, ) assert response.status_code == 200 From 
02bfffa75fdb578058ab6269a6d8d8692c2dd221 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 30 Dec 2022 13:04:57 +0000 Subject: [PATCH 022/246] test(tokens-repo): refactor the rest of auth tests --- tests/test_rest_endpoints/test_auth.py | 46 ++++++-------------------- 1 file changed, 10 insertions(+), 36 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 33eb76a..7e55900 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -378,7 +378,7 @@ def test_generate_recovery_token_with_expiration_in_the_past( json={"expiration": expiration_date_str}, ) assert response.status_code == 400 - assert "recovery_token" not in read_json(tokens_file) + assert not rest_get_recovery_status(authorized_client)["exists"] def test_generate_recovery_token_with_invalid_time_format( @@ -391,37 +391,19 @@ def test_generate_recovery_token_with_invalid_time_format( json={"expiration": expiration_date}, ) assert response.status_code == 422 - assert "recovery_token" not in read_json(tokens_file) + assert not rest_get_recovery_status(authorized_client)["exists"] def test_generate_recovery_token_with_limited_uses( authorized_client, client, tokens_file ): # Generate token with limited uses - response = authorized_client.post( - "/auth/recovery_token", - json={"uses": 2}, - ) - assert response.status_code == 200 - assert "token" in response.json() - mnemonic_token = response.json()["token"] - token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() - assert read_json(tokens_file)["recovery_token"]["token"] == token - assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 + mnemonic_token = rest_make_recovery_token(authorized_client, uses=2) - # Get the date of the token - time_generated = read_json(tokens_file)["recovery_token"]["date"] - assert time_generated is not None - assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - - 
datetime.timedelta(seconds=5) - < datetime.datetime.now() - ) + time_generated = rest_get_recovery_date(authorized_client) + assert_recovery_recent(time_generated) - # Try to get token status - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 - assert response.json() == { + assert rest_get_recovery_status(authorized_client) == { "exists": True, "valid": True, "date": time_generated, @@ -432,10 +414,7 @@ def test_generate_recovery_token_with_limited_uses( # Try to use the token rest_recover_with_mnemonic(client, mnemonic_token, "recover_device") - # Get the status of the token - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 - assert response.json() == { + assert rest_get_recovery_status(authorized_client) == { "exists": True, "valid": True, "date": time_generated, @@ -446,10 +425,7 @@ def test_generate_recovery_token_with_limited_uses( # Try to use token again rest_recover_with_mnemonic(client, mnemonic_token, "recover_device2") - # Get the status of the token - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 - assert response.json() == { + assert rest_get_recovery_status(authorized_client) == { "exists": True, "valid": False, "date": time_generated, @@ -464,8 +440,6 @@ def test_generate_recovery_token_with_limited_uses( ) assert recovery_response.status_code == 404 - assert read_json(tokens_file)["recovery_token"]["uses_left"] == 0 - def test_generate_recovery_token_with_negative_uses( authorized_client, client, tokens_file @@ -476,7 +450,7 @@ def test_generate_recovery_token_with_negative_uses( json={"uses": -2}, ) assert response.status_code == 400 - assert "recovery_token" not in read_json(tokens_file) + assert not rest_get_recovery_status(authorized_client)["exists"] def test_generate_recovery_token_with_zero_uses(authorized_client, client, tokens_file): @@ -486,4 +460,4 @@ def 
test_generate_recovery_token_with_zero_uses(authorized_client, client, token json={"uses": 0}, ) assert response.status_code == 400 - assert "recovery_token" not in read_json(tokens_file) + assert not rest_get_recovery_status(authorized_client)["exists"] From e55a55ef6f80c0df355a58f75857276c61029e5b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 30 Dec 2022 13:11:10 +0000 Subject: [PATCH 023/246] test(tokens-repo): beautify test_auth.py --- tests/test_rest_endpoints/test_auth.py | 115 +++++++++++++------------ 1 file changed, 59 insertions(+), 56 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 7e55900..17585fb 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -57,6 +57,64 @@ def rest_get_tokens_info(client): return response.json() +def rest_make_recovery_token(client, expires_at=None, timeformat=None, uses=None): + json = {} + + if expires_at is not None: + assert timeformat is not None + expires_at_str = expires_at.strftime(timeformat) + json["expiration"] = expires_at_str + + if uses is not None: + json["uses"] = uses + + if json == {}: + response = client.post("/auth/recovery_token") + else: + response = client.post( + "/auth/recovery_token", + json=json, + ) + + assert response.status_code == 200 + assert "token" in response.json() + return response.json()["token"] + + +def rest_get_recovery_status(client): + response = client.get("/auth/recovery_token") + assert response.status_code == 200 + return response.json() + + +def rest_get_recovery_date(client): + status = rest_get_recovery_status(client) + assert "date" in status + return status["date"] + + +def assert_recovery_recent(time_generated): + assert ( + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") + - datetime.timedelta(seconds=5) + < datetime.datetime.now() + ) + + +def rest_recover_with_mnemonic(client, mnemonic_token, device_name): + recovery_response = 
client.post( + "/auth/recovery_token/use", + json={"token": mnemonic_token, "device": device_name}, + ) + assert recovery_response.status_code == 200 + new_token = recovery_response.json()["token"] + assert_token_valid(client, new_token) + return new_token + + +# Tokens + + def test_get_tokens_info(authorized_client, tokens_file): assert rest_get_tokens_info(authorized_client) == [ {"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True}, @@ -118,7 +176,7 @@ def test_refresh_token(authorized_client, tokens_file): assert_token_valid(authorized_client, new_token) -# new device +# New device def test_get_new_device_auth_token_unauthorized(client, tokens_file): @@ -251,61 +309,6 @@ def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): assert_original(tokens_file) -def rest_make_recovery_token(client, expires_at=None, timeformat=None, uses=None): - json = {} - - if expires_at is not None: - assert timeformat is not None - expires_at_str = expires_at.strftime(timeformat) - json["expiration"] = expires_at_str - - if uses is not None: - json["uses"] = uses - - if json == {}: - response = client.post("/auth/recovery_token") - else: - response = client.post( - "/auth/recovery_token", - json=json, - ) - - assert response.status_code == 200 - assert "token" in response.json() - return response.json()["token"] - - -def rest_get_recovery_status(client): - response = client.get("/auth/recovery_token") - assert response.status_code == 200 - return response.json() - - -def rest_get_recovery_date(client): - status = rest_get_recovery_status(client) - assert "date" in status - return status["date"] - - -def assert_recovery_recent(time_generated): - assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - - datetime.timedelta(seconds=5) - < datetime.datetime.now() - ) - - -def rest_recover_with_mnemonic(client, mnemonic_token, device_name): - recovery_response = client.post( - "/auth/recovery_token/use", - 
json={"token": mnemonic_token, "device": device_name}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert_token_valid(client, new_token) - return new_token - - def test_generate_recovery_token(authorized_client, client, tokens_file): # Generate token without expiration and uses_left mnemonic_token = rest_make_recovery_token(authorized_client) From f45567b87b9d99caef0214cb38cb25edebb0f46c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 2 Jan 2023 14:33:48 +0000 Subject: [PATCH 024/246] fix(tokens-repo): readd gitkeep to services data folder after rebase --- tests/test_rest_endpoints/services/{ => data}/gitkeep | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/test_rest_endpoints/services/{ => data}/gitkeep (100%) diff --git a/tests/test_rest_endpoints/services/gitkeep b/tests/test_rest_endpoints/services/data/gitkeep similarity index 100% rename from tests/test_rest_endpoints/services/gitkeep rename to tests/test_rest_endpoints/services/data/gitkeep From 8f645113e2fe272f6a4ac0581140d215ebda2fad Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 2 Jan 2023 15:49:55 +0000 Subject: [PATCH 025/246] test(tokens-repo): new assert_original(), no more json --- tests/test_rest_endpoints/test_auth.py | 118 ++++++++++++++----------- 1 file changed, 65 insertions(+), 53 deletions(-) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 17585fb..40960f0 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -3,10 +3,6 @@ # pylint: disable=missing-function-docstring import datetime import pytest -from mnemonic import Mnemonic - - -from tests.common import read_json, write_json TOKENS_FILE_CONTENTS = { @@ -14,12 +10,12 @@ TOKENS_FILE_CONTENTS = { { "token": "TEST_TOKEN", "name": "test_token", - "date": "2022-01-14 08:31:10.789314", + "date": datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), }, { "token": 
"TEST_TOKEN2", "name": "test_token2", - "date": "2022-01-14 08:31:10.789314", + "date": datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), }, ] } @@ -42,8 +38,17 @@ class NearFuture(datetime.datetime): return datetime.datetime.now() + datetime.timedelta(minutes=13) -def assert_original(filename): - assert read_json(filename) == TOKENS_FILE_CONTENTS +def assert_original(client): + new_tokens = rest_get_tokens_info(client) + + for token in TOKENS_FILE_CONTENTS["tokens"]: + assert_token_valid(client, token["token"]) + for new_token in new_tokens: + if new_token["name"] == token["name"]: + assert ( + datetime.datetime.fromisoformat(new_token["date"]) == token["date"] + ) + assert_no_recovery(client) def assert_token_valid(client, token): @@ -57,6 +62,17 @@ def rest_get_tokens_info(client): return response.json() +def rest_try_authorize_new_device(client, token, device_name): + response = client.post( + "/auth/new_device/authorize", + json={ + "token": token, + "device": device_name, + }, + ) + return response + + def rest_make_recovery_token(client, expires_at=None, timeformat=None, uses=None): json = {} @@ -101,6 +117,10 @@ def assert_recovery_recent(time_generated): ) +def assert_no_recovery(client): + assert not rest_get_recovery_status(client)["exists"] + + def rest_recover_with_mnemonic(client, mnemonic_token, device_name): recovery_response = client.post( "/auth/recovery_token/use", @@ -131,10 +151,10 @@ def test_get_tokens_unauthorized(client, tokens_file): assert response.status_code == 401 -def test_delete_token_unauthorized(client, tokens_file): +def test_delete_token_unauthorized(client, authorized_client, tokens_file): response = client.delete("/auth/tokens") assert response.status_code == 401 - assert_original(tokens_file) + assert_original(authorized_client) def test_delete_token(authorized_client, tokens_file): @@ -152,7 +172,7 @@ def test_delete_self_token(authorized_client, tokens_file): "/auth/tokens", json={"token_name": "test_token"} ) assert 
response.status_code == 400 - assert_original(tokens_file) + assert_original(authorized_client) def test_delete_nonexistent_token(authorized_client, tokens_file): @@ -160,13 +180,13 @@ def test_delete_nonexistent_token(authorized_client, tokens_file): "/auth/tokens", json={"token_name": "test_token3"} ) assert response.status_code == 404 - assert_original(tokens_file) + assert_original(authorized_client) -def test_refresh_token_unauthorized(client, tokens_file): +def test_refresh_token_unauthorized(client, authorized_client, tokens_file): response = client.post("/auth/tokens") assert response.status_code == 401 - assert_original(tokens_file) + assert_original(authorized_client) def test_refresh_token(authorized_client, tokens_file): @@ -179,23 +199,26 @@ def test_refresh_token(authorized_client, tokens_file): # New device -def test_get_new_device_auth_token_unauthorized(client, tokens_file): +def test_get_new_device_auth_token_unauthorized(client, authorized_client, tokens_file): response = client.post("/auth/new_device") assert response.status_code == 401 - assert_original(tokens_file) + assert "token" not in response.json() + assert "detail" in response.json() + # We only can check existence of a token we know. 
-def test_get_and_delete_new_device_token(authorized_client, tokens_file): +def test_get_and_delete_new_device_token(client, authorized_client, tokens_file): token = rest_get_new_device_token(authorized_client) response = authorized_client.delete("/auth/new_device", json={"token": token}) assert response.status_code == 200 - assert_original(tokens_file) + assert rest_try_authorize_new_device(client, token, "new_device").status_code == 404 -def test_delete_token_unauthenticated(client, tokens_file): - response = client.delete("/auth/new_device") +def test_delete_token_unauthenticated(client, authorized_client, tokens_file): + token = rest_get_new_device_token(authorized_client) + response = client.delete("/auth/new_device", json={"token": token}) assert response.status_code == 401 - assert_original(tokens_file) + assert rest_try_authorize_new_device(client, token, "new_device").status_code == 200 def rest_get_new_device_token(client): @@ -207,38 +230,29 @@ def rest_get_new_device_token(client): def test_get_and_authorize_new_device(client, authorized_client, tokens_file): token = rest_get_new_device_token(authorized_client) - response = client.post( - "/auth/new_device/authorize", - json={ - "token": token, - "device": "new_device", - }, - ) + response = rest_try_authorize_new_device(client, token, "new_device") assert response.status_code == 200 assert_token_valid(authorized_client, response.json()["token"]) -def test_authorize_new_device_with_invalid_token(client, tokens_file): - response = client.post( - "/auth/new_device/authorize", - json={"token": "invalid_token", "device": "new_device"}, - ) +def test_authorize_new_device_with_invalid_token( + client, authorized_client, tokens_file +): + response = rest_try_authorize_new_device(client, "invalid_token", "new_device") assert response.status_code == 404 - assert_original(tokens_file) + assert_original(authorized_client) def test_get_and_authorize_used_token(client, authorized_client, tokens_file): 
token_to_be_used_2_times = rest_get_new_device_token(authorized_client) - response = client.post( - "/auth/new_device/authorize", - json={"token": token_to_be_used_2_times, "device": "new_device"}, + response = rest_try_authorize_new_device( + client, token_to_be_used_2_times, "new_device" ) assert response.status_code == 200 assert_token_valid(authorized_client, response.json()["token"]) - response = client.post( - "/auth/new_device/authorize", - json={"token": token_to_be_used_2_times, "device": "new_device"}, + response = rest_try_authorize_new_device( + client, token_to_be_used_2_times, "new_device" ) assert response.status_code == 404 @@ -251,20 +265,18 @@ def test_get_and_authorize_token_after_12_minutes( # TARDIS sounds mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, NearFuture) - response = client.post( - "/auth/new_device/authorize", - json={"token": token, "device": "new_device"}, - ) + response = rest_try_authorize_new_device(client, token, "new_device") assert response.status_code == 404 + assert_original(authorized_client) -def test_authorize_without_token(client, tokens_file): +def test_authorize_without_token(client, authorized_client, tokens_file): response = client.post( "/auth/new_device/authorize", json={"device": "new_device"}, ) assert response.status_code == 422 - assert_original(tokens_file) + assert_original(authorized_client) # Recovery tokens @@ -290,10 +302,10 @@ def test_authorize_without_token(client, tokens_file): # - if request is invalid, returns 400 -def test_get_recovery_token_status_unauthorized(client, tokens_file): +def test_get_recovery_token_status_unauthorized(client, authorized_client, tokens_file): response = client.get("/auth/recovery_token") assert response.status_code == 401 - assert_original(tokens_file) + assert_original(authorized_client) def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): @@ -306,7 +318,7 @@ def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): 
"expiration": None, "uses_left": None, } - assert_original(tokens_file) + assert_original(authorized_client) def test_generate_recovery_token(authorized_client, client, tokens_file): @@ -381,7 +393,7 @@ def test_generate_recovery_token_with_expiration_in_the_past( json={"expiration": expiration_date_str}, ) assert response.status_code == 400 - assert not rest_get_recovery_status(authorized_client)["exists"] + assert_no_recovery(authorized_client) def test_generate_recovery_token_with_invalid_time_format( @@ -394,7 +406,7 @@ def test_generate_recovery_token_with_invalid_time_format( json={"expiration": expiration_date}, ) assert response.status_code == 422 - assert not rest_get_recovery_status(authorized_client)["exists"] + assert_no_recovery(authorized_client) def test_generate_recovery_token_with_limited_uses( @@ -453,7 +465,7 @@ def test_generate_recovery_token_with_negative_uses( json={"uses": -2}, ) assert response.status_code == 400 - assert not rest_get_recovery_status(authorized_client)["exists"] + assert_no_recovery(authorized_client) def test_generate_recovery_token_with_zero_uses(authorized_client, client, tokens_file): @@ -463,4 +475,4 @@ def test_generate_recovery_token_with_zero_uses(authorized_client, client, token json={"uses": 0}, ) assert response.status_code == 400 - assert not rest_get_recovery_status(authorized_client)["exists"] + assert_no_recovery(authorized_client) From 824b018487fe353fd3f64d2581015de821e83f3a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 2 Jan 2023 17:22:18 +0000 Subject: [PATCH 026/246] test(tokens-repo): make shared test token state use token repo api for loading --- tests/conftest.py | 69 ++++++++++++++++--- tests/test_graphql/test_api_devices.py | 6 +- .../test_repository/test_tokens_repository.py | 26 ------- 3 files changed, 64 insertions(+), 37 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 4b65d20..bba3915 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,8 +4,34 @@ import os 
import pytest from fastapi.testclient import TestClient -from shutil import copy import os.path as path +import datetime + +# from selfprivacy_api.actions.api_tokens import TOKEN_REPO +from selfprivacy_api.models.tokens.token import Token +from selfprivacy_api.repositories.tokens.json_tokens_repository import ( + JsonTokensRepository, +) + +from tests.common import read_json + +EMPTY_TOKENS_JSON = ' {"tokens": []}' + + +TOKENS_FILE_CONTENTS = { + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), + }, + { + "token": "TEST_TOKEN2", + "name": "test_token2", + "date": datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), + }, + ] +} def pytest_generate_tests(metafunc): @@ -17,13 +43,40 @@ def global_data_dir(): @pytest.fixture -def tokens_file(mocker, tmpdir): - """Mock tokens file.""" - tmp_file = tmpdir / "tokens.json" - source_file = path.join(global_data_dir(), "tokens.json") - copy(source_file, tmp_file) - mock = mocker.patch("selfprivacy_api.utils.TOKENS_FILE", tmp_file) - return mock +def empty_tokens(mocker, tmpdir): + tokenfile = tmpdir / "empty_tokens.json" + with open(tokenfile, "w") as file: + file.write(EMPTY_TOKENS_JSON) + mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokenfile) + assert read_json(tokenfile)["tokens"] == [] + return tmpdir + + +@pytest.fixture +def empty_json_repo(empty_tokens): + repo = JsonTokensRepository() + for token in repo.get_tokens(): + repo.delete_token(token) + assert repo.get_tokens() == [] + return repo + + +@pytest.fixture +def tokens_file(empty_json_repo, tmpdir): + """A state with tokens""" + for token in TOKENS_FILE_CONTENTS["tokens"]: + empty_json_repo._store_token( + Token( + token=token["token"], + device_name=token["name"], + created_at=token["date"], + ) + ) + # temporary return for compatibility with older tests + + tokenfile = tmpdir / "empty_tokens.json" + assert path.exists(tokenfile) + return tokenfile @pytest.fixture diff --git 
a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 07cf42a..c546238 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -17,12 +17,12 @@ TOKENS_FILE_CONTETS = { { "token": "TEST_TOKEN", "name": "test_token", - "date": "2022-01-14 08:31:10.789314", + "date": "2022-01-14T08:31:10.789314", }, { "token": "TEST_TOKEN2", "name": "test_token2", - "date": "2022-01-14 08:31:10.789314", + "date": "2022-01-14T08:31:10.789314", }, ] } @@ -118,7 +118,7 @@ def test_graphql_delete_token(authorized_client, tokens_file): { "token": "TEST_TOKEN", "name": "test_token", - "date": "2022-01-14 08:31:10.789314", + "date": "2022-01-14T08:31:10.789314", } ] } diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index b172f13..a2dbb7a 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -16,13 +16,9 @@ from selfprivacy_api.repositories.tokens.exceptions import ( TokenNotFound, NewDeviceKeyNotFound, ) -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( RedisTokensRepository, ) -from tests.common import read_json ORIGINAL_DEVICE_NAMES = [ @@ -33,23 +29,10 @@ ORIGINAL_DEVICE_NAMES = [ ] -EMPTY_TOKENS_JSON = ' {"tokens": []}' - - def mnemonic_from_hex(hexkey): return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey)) -@pytest.fixture -def empty_tokens(mocker, tmpdir): - tokens_file = tmpdir / "empty_tokens.json" - with open(tokens_file, "w") as file: - file.write(EMPTY_TOKENS_JSON) - mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokens_file) - assert read_json(tokens_file)["tokens"] == [] - return tmpdir - - @pytest.fixture def mock_new_device_key_generate(mocker): mock = mocker.patch( @@ -137,15 
+120,6 @@ def mock_recovery_key_generate(mocker): return mock -@pytest.fixture -def empty_json_repo(empty_tokens): - repo = JsonTokensRepository() - for token in repo.get_tokens(): - repo.delete_token(token) - assert repo.get_tokens() == [] - return repo - - @pytest.fixture def empty_redis_repo(): repo = RedisTokensRepository() From 00ba76c074567481b38e23243546881144f2b39c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 2 Jan 2023 18:22:04 +0000 Subject: [PATCH 027/246] refactor(tokens-repo): delete a stray comment --- tests/conftest.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index bba3915..891e4e9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,7 +7,6 @@ from fastapi.testclient import TestClient import os.path as path import datetime -# from selfprivacy_api.actions.api_tokens import TOKEN_REPO from selfprivacy_api.models.tokens.token import Token from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, From 2f707cc0cc8a7b672a3115d1c07c67fe184dbbe1 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 11:23:53 +0000 Subject: [PATCH 028/246] test(tokens-repo): delete extraneous test token content copies --- tests/test_graphql/test_api.py | 15 --------------- tests/test_graphql/test_api_recovery.py | 15 --------------- tests/test_rest_endpoints/test_auth.py | 16 +--------------- 3 files changed, 1 insertion(+), 45 deletions(-) diff --git a/tests/test_graphql/test_api.py b/tests/test_graphql/test_api.py index 16c7c4d..695dd8e 100644 --- a/tests/test_graphql/test_api.py +++ b/tests/test_graphql/test_api.py @@ -7,21 +7,6 @@ from tests.test_graphql.test_api_devices import API_DEVICES_QUERY from tests.test_graphql.test_api_recovery import API_RECOVERY_QUERY from tests.test_graphql.test_api_version import API_VERSION_QUERY -TOKENS_FILE_CONTETS = { - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314", - }, - { 
- "token": "TEST_TOKEN2", - "name": "test_token2", - "date": "2022-01-14 08:31:10.789314", - }, - ] -} - def test_graphql_get_entire_api_data(authorized_client, tokens_file): response = authorized_client.post( diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index c5e229e..2cb824f 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -5,21 +5,6 @@ import datetime from tests.common import generate_api_query, mnemonic_to_hex, read_json, write_json -TOKENS_FILE_CONTETS = { - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314", - }, - { - "token": "TEST_TOKEN2", - "name": "test_token2", - "date": "2022-01-14 08:31:10.789314", - }, - ] -} - API_RECOVERY_QUERY = """ recoveryKey { exists diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 40960f0..1872203 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -4,21 +4,7 @@ import datetime import pytest - -TOKENS_FILE_CONTENTS = { - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), - }, - { - "token": "TEST_TOKEN2", - "name": "test_token2", - "date": datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), - }, - ] -} +from tests.conftest import TOKENS_FILE_CONTENTS DATE_FORMATS = [ "%Y-%m-%dT%H:%M:%S.%fZ", From d26d115172cad8c1542dc72120181d60a1246531 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 12:31:24 +0000 Subject: [PATCH 029/246] test(tokens-repo): break out assert_original() in graphql device tests --- tests/conftest.py | 6 ++- tests/test_graphql/test_api_devices.py | 63 +++++++++++--------------- 2 files changed, 32 insertions(+), 37 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 891e4e9..212b6da 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -32,6 +32,8 @@ 
TOKENS_FILE_CONTENTS = { ] } +DEVICE_WE_AUTH_TESTS_WITH = TOKENS_FILE_CONTENTS["tokens"][0] + def pytest_generate_tests(metafunc): os.environ["TEST_MODE"] = "true" @@ -107,7 +109,9 @@ def authorized_client(tokens_file, huey_database, jobs_file): from selfprivacy_api.app import app client = TestClient(app) - client.headers.update({"Authorization": "Bearer TEST_TOKEN"}) + client.headers.update( + {"Authorization": "Bearer " + DEVICE_WE_AUTH_TESTS_WITH["token"]} + ) return client diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index c546238..f91b4f1 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -11,21 +11,7 @@ from selfprivacy_api.repositories.tokens.json_tokens_repository import ( from selfprivacy_api.models.tokens.token import Token from tests.common import generate_api_query, read_json, write_json - -TOKENS_FILE_CONTETS = { - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14T08:31:10.789314", - }, - { - "token": "TEST_TOKEN2", - "name": "test_token2", - "date": "2022-01-14T08:31:10.789314", - }, - ] -} +from tests.conftest import DEVICE_WE_AUTH_TESTS_WITH, TOKENS_FILE_CONTENTS API_DEVICES_QUERY = """ devices { @@ -41,27 +27,30 @@ def token_repo(): return JsonTokensRepository() -def test_graphql_tokens_info(authorized_client, tokens_file): - response = authorized_client.post( +def assert_original(client): + response = client.post( "/graphql", json={"query": generate_api_query([API_DEVICES_QUERY])}, ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["api"]["devices"] is not None - assert len(response.json()["data"]["api"]["devices"]) == 2 - assert ( - response.json()["data"]["api"]["devices"][0]["creationDate"] - == "2022-01-14T08:31:10.789314" - ) - assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True - assert 
response.json()["data"]["api"]["devices"][0]["name"] == "test_token" - assert ( - response.json()["data"]["api"]["devices"][1]["creationDate"] - == "2022-01-14T08:31:10.789314" - ) - assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False - assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2" + devices = response.json()["data"]["api"]["devices"] + assert devices is not None + original_devices = TOKENS_FILE_CONTENTS["tokens"] + assert len(devices) == len(original_devices) + for original_device in original_devices: + assert original_device["name"] in [device["name"] for device in devices] + for device in devices: + if device["name"] == DEVICE_WE_AUTH_TESTS_WITH["name"]: + assert device["isCaller"] is True + else: + assert device["isCaller"] is False + if device["name"] == original_device["name"]: + assert device["creationDate"] == original_device["date"].isoformat() + + +def test_graphql_tokens_info(authorized_client, tokens_file): + assert_original(authorized_client) def test_graphql_tokens_info_unauthorized(client, tokens_file): @@ -139,7 +128,7 @@ def test_graphql_delete_self_token(authorized_client, tokens_file): assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 400 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(authorized_client) def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): @@ -157,7 +146,7 @@ def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 404 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(authorized_client) REFRESH_TOKEN_MUTATION = 
""" @@ -294,7 +283,7 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): assert response.json()["data"]["invalidateNewDeviceApiKey"]["success"] is True assert response.json()["data"]["invalidateNewDeviceApiKey"]["message"] is not None assert response.json()["data"]["invalidateNewDeviceApiKey"]["code"] == 200 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(authorized_client) AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """ @@ -347,7 +336,9 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" -def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): +def test_graphql_authorize_new_device_with_invalid_key( + client, authorized_client, tokens_file +): response = client.post( "/graphql", json={ @@ -367,7 +358,7 @@ def test_graphql_authorize_new_device_with_invalid_key(client, tokens_file): response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None ) assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_original(authorized_client) def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_file): From d8c78cc14c87c2de2d97b63a75580652434fa853 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 13:18:38 +0000 Subject: [PATCH 030/246] test(tokens-repo): untie token deletion tests from json --- tests/test_graphql/test_api_devices.py | 51 ++++++++++++++++---------- 1 file changed, 31 insertions(+), 20 deletions(-) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index f91b4f1..437470a 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -13,6 +13,8 @@ from selfprivacy_api.models.tokens.token import Token from tests.common import generate_api_query, read_json, write_json from 
tests.conftest import DEVICE_WE_AUTH_TESTS_WITH, TOKENS_FILE_CONTENTS +ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] + API_DEVICES_QUERY = """ devices { creationDate @@ -27,7 +29,7 @@ def token_repo(): return JsonTokensRepository() -def assert_original(client): +def graphql_get_devices(client): response = client.post( "/graphql", json={"query": generate_api_query([API_DEVICES_QUERY])}, @@ -36,19 +38,30 @@ def assert_original(client): assert response.json().get("data") is not None devices = response.json()["data"]["api"]["devices"] assert devices is not None - original_devices = TOKENS_FILE_CONTENTS["tokens"] - assert len(devices) == len(original_devices) - for original_device in original_devices: - assert original_device["name"] in [device["name"] for device in devices] - for device in devices: - if device["name"] == DEVICE_WE_AUTH_TESTS_WITH["name"]: - assert device["isCaller"] is True - else: - assert device["isCaller"] is False + return devices + + +def assert_same(graphql_devices, abstract_devices): + """Orderless comparison""" + assert len(graphql_devices) == len(abstract_devices) + for original_device in abstract_devices: + assert original_device["name"] in [device["name"] for device in graphql_devices] + for device in graphql_devices: if device["name"] == original_device["name"]: assert device["creationDate"] == original_device["date"].isoformat() +def assert_original(client): + devices = graphql_get_devices(client) + assert_same(devices, ORIGINAL_DEVICES) + + for device in devices: + if device["name"] == DEVICE_WE_AUTH_TESTS_WITH["name"]: + assert device["isCaller"] is True + else: + assert device["isCaller"] is False + + def test_graphql_tokens_info(authorized_client, tokens_file): assert_original(authorized_client) @@ -88,12 +101,16 @@ def test_graphql_delete_token_unauthorized(client, tokens_file): def test_graphql_delete_token(authorized_client, tokens_file): + test_devices = ORIGINAL_DEVICES.copy() + device_to_delete = test_devices.pop(1) + assert 
device_to_delete != DEVICE_WE_AUTH_TESTS_WITH + response = authorized_client.post( "/graphql", json={ "query": DELETE_TOKEN_MUTATION, "variables": { - "device": "test_token2", + "device": device_to_delete["name"], }, }, ) @@ -102,15 +119,9 @@ def test_graphql_delete_token(authorized_client, tokens_file): assert response.json()["data"]["deleteDeviceApiToken"]["success"] is True assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 200 - assert read_json(tokens_file) == { - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14T08:31:10.789314", - } - ] - } + + devices = graphql_get_devices(authorized_client) + assert_same(devices, test_devices) def test_graphql_delete_self_token(authorized_client, tokens_file): From 7f5236701e023a675191f1dc960a5413e631db0a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 14:01:23 +0000 Subject: [PATCH 031/246] test(tokens-repo): break out assert_ok() and assert_errorcode() in graphql --- tests/test_graphql/test_api_devices.py | 28 +++++++++++++++++--------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 437470a..5f88079 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -62,6 +62,22 @@ def assert_original(client): assert device["isCaller"] is False +def assert_ok(response, request): + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"][request]["success"] is True + assert response.json()["data"][request]["message"] is not None + assert response.json()["data"][request]["code"] == 200 + + +def assert_errorcode(response, request, code): + assert response.status_code == 200 + assert response.json().get("data") is not None + assert response.json()["data"][request]["success"] is False + 
assert response.json()["data"][request]["message"] is not None + assert response.json()["data"][request]["code"] == code + + def test_graphql_tokens_info(authorized_client, tokens_file): assert_original(authorized_client) @@ -114,11 +130,7 @@ def test_graphql_delete_token(authorized_client, tokens_file): }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["deleteDeviceApiToken"]["success"] is True - assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 200 + assert_ok(response, "deleteDeviceApiToken") devices = graphql_get_devices(authorized_client) assert_same(devices, test_devices) @@ -134,11 +146,7 @@ def test_graphql_delete_self_token(authorized_client, tokens_file): }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False - assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 400 + assert_errorcode(response, "deleteDeviceApiToken", 400) assert_original(authorized_client) From 5a1b48fa3d00f129f568ac9b563348c7fe1e76c3 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 14:15:12 +0000 Subject: [PATCH 032/246] test(tokens-repo): break out assert_empty() --- tests/test_graphql/test_api_devices.py | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 5f88079..90f1685 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -78,6 +78,11 @@ def assert_errorcode(response, request, code): assert response.json()["data"][request]["code"] == code +def assert_empty(response): + assert response.status_code == 200 + assert 
response.json().get("data") is None + + def test_graphql_tokens_info(authorized_client, tokens_file): assert_original(authorized_client) @@ -87,8 +92,7 @@ def test_graphql_tokens_info_unauthorized(client, tokens_file): "/graphql", json={"query": generate_api_query([API_DEVICES_QUERY])}, ) - assert response.status_code == 200 - assert response.json()["data"] is None + assert_empty(response) DELETE_TOKEN_MUTATION = """ @@ -112,8 +116,7 @@ def test_graphql_delete_token_unauthorized(client, tokens_file): }, }, ) - assert response.status_code == 200 - assert response.json()["data"] is None + assert_empty(response) def test_graphql_delete_token(authorized_client, tokens_file): @@ -185,8 +188,7 @@ def test_graphql_refresh_token_unauthorized(client, tokens_file): "/graphql", json={"query": REFRESH_TOKEN_MUTATION}, ) - assert response.status_code == 200 - assert response.json()["data"] is None + assert_empty(response) def test_graphql_refresh_token(authorized_client, tokens_file, token_repo): @@ -224,8 +226,7 @@ def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file): "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) - assert response.status_code == 200 - assert response.json()["data"] is None + assert_empty(response) def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): @@ -270,8 +271,7 @@ def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): }, }, ) - assert response.status_code == 200 - assert response.json()["data"] is None + assert_empty(response) def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): @@ -502,5 +502,4 @@ def test_graphql_authorize_without_token(client, tokens_file): }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) From 4676e364a69f18eab960fb31b0e46fcf3d55ac4a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 14:22:14 +0000 Subject: [PATCH 033/246] test(tokens-repo): break out 
assert_data() --- tests/test_graphql/test_api_devices.py | 30 +++++++++++++++----------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 90f1685..3104874 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -34,9 +34,8 @@ def graphql_get_devices(client): "/graphql", json={"query": generate_api_query([API_DEVICES_QUERY])}, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - devices = response.json()["data"]["api"]["devices"] + data = assert_data(response) + devices = data["api"]["devices"] assert devices is not None return devices @@ -63,19 +62,17 @@ def assert_original(client): def assert_ok(response, request): - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"][request]["success"] is True - assert response.json()["data"][request]["message"] is not None - assert response.json()["data"][request]["code"] == 200 + data = assert_data(response) + data[request]["success"] is True + data[request]["message"] is not None + data[request]["code"] == 200 def assert_errorcode(response, request, code): - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"][request]["success"] is False - assert response.json()["data"][request]["message"] is not None - assert response.json()["data"][request]["code"] == code + data = assert_data(response) + data[request]["success"] is False + data[request]["message"] is not None + data[request]["code"] == code def assert_empty(response): @@ -83,6 +80,13 @@ def assert_empty(response): assert response.json().get("data") is None +def assert_data(response): + assert response.status_code == 200 + data = response.json().get("data") + assert data is not None + return data + + def test_graphql_tokens_info(authorized_client, tokens_file): 
assert_original(authorized_client) From ba5f91b00017186764767e1953a97086cedfb8c4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 15:05:01 +0000 Subject: [PATCH 034/246] test(tokens-repo): apply assert_ok and assert_error() --- tests/test_graphql/test_api_devices.py | 96 ++++++-------------------- 1 file changed, 21 insertions(+), 75 deletions(-) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 3104874..f6ac3ac 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -167,11 +167,8 @@ def test_graphql_delete_nonexistent_token(authorized_client, tokens_file): }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["deleteDeviceApiToken"]["success"] is False - assert response.json()["data"]["deleteDeviceApiToken"]["message"] is not None - assert response.json()["data"]["deleteDeviceApiToken"]["code"] == 404 + assert_errorcode(response, "deleteDeviceApiToken", 404) + assert_original(authorized_client) @@ -200,11 +197,8 @@ def test_graphql_refresh_token(authorized_client, tokens_file, token_repo): "/graphql", json={"query": REFRESH_TOKEN_MUTATION}, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["refreshDeviceApiToken"]["success"] is True - assert response.json()["data"]["refreshDeviceApiToken"]["message"] is not None - assert response.json()["data"]["refreshDeviceApiToken"]["code"] == 200 + assert_ok(response, "refreshDeviceApiToken") + token = token_repo.get_token_by_name("test_token") assert token == Token( token=response.json()["data"]["refreshDeviceApiToken"]["token"], @@ -238,11 +232,8 @@ def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert 
response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert_ok(response, "getNewDeviceApiKey") + assert ( response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 ) @@ -283,11 +274,8 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert_ok(response, "getNewDeviceApiKey") + assert ( response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 ) @@ -301,11 +289,7 @@ def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): "/graphql", json={"query": INVALIDATE_NEW_DEVICE_KEY_MUTATION}, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["invalidateNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["invalidateNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["invalidateNewDeviceApiKey"]["code"] == 200 + assert_ok(response, "invalidateNewDeviceApiKey") assert_original(authorized_client) @@ -326,15 +310,13 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 
+ assert_ok(response, "getNewDeviceApiKey") + mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] assert mnemonic_key.split(" ").__len__() == 12 key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key + response = client.post( "/graphql", json={ @@ -347,13 +329,8 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True - assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + + assert_ok(response, "authorizeWithNewDeviceApiKey") token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] assert read_json(tokens_file)["tokens"][2]["token"] == token assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" @@ -374,13 +351,7 @@ def test_graphql_authorize_new_device_with_invalid_key( }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False - assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) assert_original(authorized_client) @@ -389,15 +360,12 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert 
response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert_ok(response, "getNewDeviceApiKey") mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] assert mnemonic_key.split(" ").__len__() == 12 key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key + response = client.post( "/graphql", json={ @@ -410,13 +378,7 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is True - assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 200 + assert_ok(response, "authorizeWithNewDeviceApiKey") assert ( read_json(tokens_file)["tokens"][2]["token"] == response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] @@ -435,13 +397,7 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False - assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) assert read_json(tokens_file)["tokens"].__len__() == 3 @@ -452,11 +408,7 @@ def test_graphql_get_and_authorize_key_after_12_minutes( "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["getNewDeviceApiKey"]["success"] is True - assert response.json()["data"]["getNewDeviceApiKey"]["message"] is not None - assert 
response.json()["data"]["getNewDeviceApiKey"]["code"] == 200 + assert_ok(response, "getNewDeviceApiKey") assert ( response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 ) @@ -485,13 +437,7 @@ def test_graphql_get_and_authorize_key_after_12_minutes( }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["success"] is False - assert ( - response.json()["data"]["authorizeWithNewDeviceApiKey"]["message"] is not None - ) - assert response.json()["data"]["authorizeWithNewDeviceApiKey"]["code"] == 404 + assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) def test_graphql_authorize_without_token(client, tokens_file): From 469f9d292d8d9d3297ba3d28bc05f64a9a1f5737 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 15:08:15 +0000 Subject: [PATCH 035/246] test(tokens-repo): make sure we try to delete the token we authed with --- tests/test_graphql/test_api_devices.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index f6ac3ac..6ee1ab4 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -149,7 +149,7 @@ def test_graphql_delete_self_token(authorized_client, tokens_file): json={ "query": DELETE_TOKEN_MUTATION, "variables": { - "device": "test_token", + "device": DEVICE_WE_AUTH_TESTS_WITH["name"], }, }, ) From 6eb5800e4e0d63517cbe622ee6c93c49e9d7bffb Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 15:37:48 +0000 Subject: [PATCH 036/246] test(tokens-repo): untie refresh token testing from token repo --- tests/test_graphql/test_api_devices.py | 40 +++++++++++++++----------- 1 file changed, 23 insertions(+), 17 deletions(-) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 6ee1ab4..780611f 100644 --- 
a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -2,13 +2,8 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring import datetime -import pytest from mnemonic import Mnemonic -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) -from selfprivacy_api.models.tokens.token import Token from tests.common import generate_api_query, read_json, write_json from tests.conftest import DEVICE_WE_AUTH_TESTS_WITH, TOKENS_FILE_CONTENTS @@ -24,11 +19,6 @@ devices { """ -@pytest.fixture -def token_repo(): - return JsonTokensRepository() - - def graphql_get_devices(client): response = client.post( "/graphql", @@ -40,6 +30,13 @@ def graphql_get_devices(client): return devices +def graphql_get_caller_token_info(client): + devices = graphql_get_devices(client) + for device in devices: + if device["isCaller"] is True: + return device + + def assert_same(graphql_devices, abstract_devices): """Orderless comparison""" assert len(graphql_devices) == len(abstract_devices) @@ -87,6 +84,15 @@ def assert_data(response): return data +def set_client_token(client, token): + client.headers.update({"Authorization": "Bearer " + token}) + + +def assert_token_valid(client, token): + set_client_token(client, token) + assert graphql_get_devices(client) is not None + + def test_graphql_tokens_info(authorized_client, tokens_file): assert_original(authorized_client) @@ -192,19 +198,19 @@ def test_graphql_refresh_token_unauthorized(client, tokens_file): assert_empty(response) -def test_graphql_refresh_token(authorized_client, tokens_file, token_repo): +def test_graphql_refresh_token(authorized_client, client, tokens_file): + caller_name_and_date = graphql_get_caller_token_info(authorized_client) response = authorized_client.post( "/graphql", json={"query": REFRESH_TOKEN_MUTATION}, ) assert_ok(response, "refreshDeviceApiToken") - token = token_repo.get_token_by_name("test_token") - assert token 
== Token( - token=response.json()["data"]["refreshDeviceApiToken"]["token"], - device_name="test_token", - created_at=datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), - ) + new_token = response.json()["data"]["refreshDeviceApiToken"]["token"] + assert_token_valid(client, new_token) + + set_client_token(client, new_token) + assert graphql_get_caller_token_info(client) == caller_name_and_date NEW_DEVICE_KEY_MUTATION = """ From 102d6b1c5c3eaebd75397bf26feed44919bc2df9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 15:41:45 +0000 Subject: [PATCH 037/246] test(tokens-repo): delete get new device key standalone test we can only see if device key is valid by using it or deleting it. another test does it --- tests/test_graphql/test_api_devices.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 780611f..ea926ea 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -233,24 +233,6 @@ def test_graphql_get_new_device_auth_key_unauthorized(client, tokens_file): assert_empty(response) -def test_graphql_get_new_device_auth_key(authorized_client, tokens_file): - response = authorized_client.post( - "/graphql", - json={"query": NEW_DEVICE_KEY_MUTATION}, - ) - assert_ok(response, "getNewDeviceApiKey") - - assert ( - response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 - ) - token = ( - Mnemonic(language="english") - .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) - .hex() - ) - assert read_json(tokens_file)["new_device"]["token"] == token - - INVALIDATE_NEW_DEVICE_KEY_MUTATION = """ mutation InvalidateNewDeviceKey { invalidateNewDeviceApiKey { From e739921835fbbdfc517ca9c9f4a8c78b79f7b148 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 16:08:40 +0000 Subject: [PATCH 038/246] test(tokens-repo): untie get and delete new device from json --- 
tests/test_graphql/test_api_devices.py | 61 ++++++++++++++------------ 1 file changed, 33 insertions(+), 28 deletions(-) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index ea926ea..51d729c 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -93,6 +93,33 @@ def assert_token_valid(client, token): assert graphql_get_devices(client) is not None +def graphql_get_new_device_key(authorized_client) -> str: + response = authorized_client.post( + "/graphql", + json={"query": NEW_DEVICE_KEY_MUTATION}, + ) + assert_ok(response, "getNewDeviceApiKey") + + key = response.json()["data"]["getNewDeviceApiKey"]["key"] + assert key.split(" ").__len__() == 12 + return key + + +def graphql_try_auth_new_device(client, mnemonic_key, device_name): + return client.post( + "/graphql", + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "input": { + "key": mnemonic_key, + "deviceName": device_name, + } + }, + }, + ) + + def test_graphql_tokens_info(authorized_client, tokens_file): assert_original(authorized_client) @@ -257,28 +284,17 @@ def test_graphql_invalidate_new_device_token_unauthorized(client, tokens_file): assert_empty(response) -def test_graphql_get_and_delete_new_device_key(authorized_client, tokens_file): - response = authorized_client.post( - "/graphql", - json={"query": NEW_DEVICE_KEY_MUTATION}, - ) - assert_ok(response, "getNewDeviceApiKey") +def test_graphql_get_and_delete_new_device_key(client, authorized_client, tokens_file): + mnemonic_key = graphql_get_new_device_key(authorized_client) - assert ( - response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 - ) - token = ( - Mnemonic(language="english") - .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) - .hex() - ) - assert read_json(tokens_file)["new_device"]["token"] == token response = authorized_client.post( "/graphql", json={"query": 
INVALIDATE_NEW_DEVICE_KEY_MUTATION}, ) assert_ok(response, "invalidateNewDeviceApiKey") - assert_original(authorized_client) + + response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") + assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """ @@ -305,18 +321,7 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() assert read_json(tokens_file)["new_device"]["token"] == key - response = client.post( - "/graphql", - json={ - "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, - "variables": { - "input": { - "key": mnemonic_key, - "deviceName": "new_device", - } - }, - }, - ) + response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") assert_ok(response, "authorizeWithNewDeviceApiKey") token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] From 367ba51c9d0c444a6f0c15111c301a33c7c7f3f6 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 16:26:59 +0000 Subject: [PATCH 039/246] test(tokens-repo): untie authorize new device from json --- tests/test_graphql/test_api_devices.py | 28 ++++++++++++-------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 51d729c..4b792db 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -120,6 +120,13 @@ def graphql_try_auth_new_device(client, mnemonic_key, device_name): ) +def graphql_authorize_new_device(client, mnemonic_key, device_name) -> str: + response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") + assert_ok(response, "authorizeWithNewDeviceApiKey") + token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] + assert_token_valid(client, token) + + def test_graphql_tokens_info(authorized_client, tokens_file): 
assert_original(authorized_client) @@ -310,23 +317,14 @@ mutation AuthorizeWithNewDeviceKey($input: UseNewDeviceKeyInput!) { def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_file): - response = authorized_client.post( - "/graphql", - json={"query": NEW_DEVICE_KEY_MUTATION}, - ) - assert_ok(response, "getNewDeviceApiKey") + mnemonic_key = graphql_get_new_device_key(authorized_client) + old_devices = graphql_get_devices(authorized_client) - mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] - assert mnemonic_key.split(" ").__len__() == 12 - key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() - assert read_json(tokens_file)["new_device"]["token"] == key + graphql_authorize_new_device(client, mnemonic_key, "new_device") + new_devices = graphql_get_devices(authorized_client) - response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") - - assert_ok(response, "authorizeWithNewDeviceApiKey") - token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] - assert read_json(tokens_file)["tokens"][2]["token"] == token - assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" + assert len(new_devices) == len(old_devices) + 1 + assert "new_device" in [device["name"] for device in new_devices] def test_graphql_authorize_new_device_with_invalid_key( From 592d62f53f0857be7e9883cd72c1852bf05fbc45 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 16:37:27 +0000 Subject: [PATCH 040/246] test(tokens-repo): untie double new device auth from json --- tests/test_graphql/test_api_devices.py | 46 ++++---------------------- 1 file changed, 6 insertions(+), 40 deletions(-) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 4b792db..a88493c 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -347,49 +347,15 @@ def test_graphql_authorize_new_device_with_invalid_key( def 
test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_file): - response = authorized_client.post( - "/graphql", - json={"query": NEW_DEVICE_KEY_MUTATION}, - ) - assert_ok(response, "getNewDeviceApiKey") - mnemonic_key = response.json()["data"]["getNewDeviceApiKey"]["key"] - assert mnemonic_key.split(" ").__len__() == 12 - key = Mnemonic(language="english").to_entropy(mnemonic_key).hex() - assert read_json(tokens_file)["new_device"]["token"] == key + mnemonic_key = graphql_get_new_device_key(authorized_client) - response = client.post( - "/graphql", - json={ - "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, - "variables": { - "input": { - "key": mnemonic_key, - "deviceName": "new_token", - } - }, - }, - ) - assert_ok(response, "authorizeWithNewDeviceApiKey") - assert ( - read_json(tokens_file)["tokens"][2]["token"] - == response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] - ) - assert read_json(tokens_file)["tokens"][2]["name"] == "new_token" + graphql_authorize_new_device(client, mnemonic_key, "new_device") + devices = graphql_get_devices(authorized_client) - response = client.post( - "/graphql", - json={ - "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, - "variables": { - "input": { - "key": mnemonic_key, - "deviceName": "test_token2", - } - }, - }, - ) + response = graphql_try_auth_new_device(client, mnemonic_key, "new_device2") assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) - assert read_json(tokens_file)["tokens"].__len__() == 3 + + assert graphql_get_devices(authorized_client) == devices def test_graphql_get_and_authorize_key_after_12_minutes( From 0aaa90f54a6a8ac38ce74bae126eef0d8a8e6fcf Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 16:42:50 +0000 Subject: [PATCH 041/246] test(tokens-repo): shrink invalid new device test --- tests/test_graphql/test_api_devices.py | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/tests/test_graphql/test_api_devices.py 
b/tests/test_graphql/test_api_devices.py index a88493c..37d81af 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -330,19 +330,9 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ def test_graphql_authorize_new_device_with_invalid_key( client, authorized_client, tokens_file ): - response = client.post( - "/graphql", - json={ - "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, - "variables": { - "input": { - "key": "invalid_token", - "deviceName": "test_token", - } - }, - }, - ) + response = graphql_try_auth_new_device(client, "invalid_token", "new_device") assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) + assert_original(authorized_client) From f5faf84a2b58012f1444e530f033ac17edb95f2a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 16:49:40 +0000 Subject: [PATCH 042/246] test(tokens-repo): move timewarping to test commons --- tests/common.py | 11 +++++++++++ tests/test_rest_endpoints/test_auth.py | 15 +++++---------- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/tests/common.py b/tests/common.py index 18e065c..95488cc 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1,6 +1,17 @@ import json +import datetime from mnemonic import Mnemonic +# for expiration tests. 
If headache, consider freezegun +RECOVERY_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.recovery_key.datetime" +DEVICE_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.new_device_key.datetime" + + +class NearFuture(datetime.datetime): + @classmethod + def now(cls): + return datetime.datetime.now() + datetime.timedelta(minutes=13) + def read_json(file_path): with open(file_path, "r", encoding="utf-8") as file: diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 1872203..1632e22 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -5,6 +5,11 @@ import datetime import pytest from tests.conftest import TOKENS_FILE_CONTENTS +from tests.common import ( + RECOVERY_KEY_VALIDATION_DATETIME, + DEVICE_KEY_VALIDATION_DATETIME, + NearFuture, +) DATE_FORMATS = [ "%Y-%m-%dT%H:%M:%S.%fZ", @@ -13,16 +18,6 @@ DATE_FORMATS = [ "%Y-%m-%d %H:%M:%S.%f", ] -# for expiration tests. If headache, consider freezegun -RECOVERY_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.recovery_key.datetime" -DEVICE_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.new_device_key.datetime" - - -class NearFuture(datetime.datetime): - @classmethod - def now(cls): - return datetime.datetime.now() + datetime.timedelta(minutes=13) - def assert_original(client): new_tokens = rest_get_tokens_info(client) From 1305144112e1851ab09ddc58b0b2978e65ba651e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 4 Jan 2023 16:58:27 +0000 Subject: [PATCH 043/246] test(tokens-repo): complete untying graphql device tests from json --- tests/test_graphql/test_api_devices.py | 48 ++++++-------------------- 1 file changed, 10 insertions(+), 38 deletions(-) diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 37d81af..fce99f7 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -1,11 +1,12 @@ # pylint: 
disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=missing-function-docstring -import datetime -from mnemonic import Mnemonic - - -from tests.common import generate_api_query, read_json, write_json +from tests.common import ( + RECOVERY_KEY_VALIDATION_DATETIME, + DEVICE_KEY_VALIDATION_DATETIME, + NearFuture, + generate_api_query +) from tests.conftest import DEVICE_WE_AUTH_TESTS_WITH, TOKENS_FILE_CONTENTS ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] @@ -349,41 +350,12 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi def test_graphql_get_and_authorize_key_after_12_minutes( - client, authorized_client, tokens_file + client, authorized_client, tokens_file, mocker ): - response = authorized_client.post( - "/graphql", - json={"query": NEW_DEVICE_KEY_MUTATION}, - ) - assert_ok(response, "getNewDeviceApiKey") - assert ( - response.json()["data"]["getNewDeviceApiKey"]["key"].split(" ").__len__() == 12 - ) - key = ( - Mnemonic(language="english") - .to_entropy(response.json()["data"]["getNewDeviceApiKey"]["key"]) - .hex() - ) - assert read_json(tokens_file)["new_device"]["token"] == key + mnemonic_key = graphql_get_new_device_key(authorized_client) + mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, NearFuture) - file_data = read_json(tokens_file) - file_data["new_device"]["expiration"] = str( - datetime.datetime.now() - datetime.timedelta(minutes=13) - ) - write_json(tokens_file, file_data) - - response = client.post( - "/graphql", - json={ - "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, - "variables": { - "input": { - "key": key, - "deviceName": "test_token", - } - }, - }, - ) + response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) From d09cd1bbe1a83fb4d79fe0c9928101bce49fda1b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 6 Jan 2023 09:54:07 +0000 Subject: [PATCH 044/246] test(tokens-repo): use assert_empty 
consistently --- tests/test_graphql/common.py | 24 +++++++++++++++++ tests/test_graphql/test_api_devices.py | 34 +++++-------------------- tests/test_graphql/test_api_recovery.py | 7 +++-- tests/test_graphql/test_ssh.py | 7 +++-- tests/test_graphql/test_system.py | 22 ++++++---------- tests/test_graphql/test_users.py | 16 +++++------- 6 files changed, 51 insertions(+), 59 deletions(-) create mode 100644 tests/test_graphql/common.py diff --git a/tests/test_graphql/common.py b/tests/test_graphql/common.py new file mode 100644 index 0000000..f2cc54d --- /dev/null +++ b/tests/test_graphql/common.py @@ -0,0 +1,24 @@ +def assert_ok(response, request): + data = assert_data(response) + data[request]["success"] is True + data[request]["message"] is not None + data[request]["code"] == 200 + + +def assert_errorcode(response, request, code): + data = assert_data(response) + data[request]["success"] is False + data[request]["message"] is not None + data[request]["code"] == code + + +def assert_empty(response): + assert response.status_code == 200 + assert response.json().get("data") is None + + +def assert_data(response): + assert response.status_code == 200 + data = response.json().get("data") + assert data is not None + return data diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index fce99f7..3db8647 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -5,9 +5,15 @@ from tests.common import ( RECOVERY_KEY_VALIDATION_DATETIME, DEVICE_KEY_VALIDATION_DATETIME, NearFuture, - generate_api_query + generate_api_query, ) from tests.conftest import DEVICE_WE_AUTH_TESTS_WITH, TOKENS_FILE_CONTENTS +from tests.test_graphql.common import ( + assert_data, + assert_empty, + assert_ok, + assert_errorcode, +) ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] @@ -59,32 +65,6 @@ def assert_original(client): assert device["isCaller"] is False -def assert_ok(response, request): - data = 
assert_data(response) - data[request]["success"] is True - data[request]["message"] is not None - data[request]["code"] == 200 - - -def assert_errorcode(response, request, code): - data = assert_data(response) - data[request]["success"] is False - data[request]["message"] is not None - data[request]["code"] == code - - -def assert_empty(response): - assert response.status_code == 200 - assert response.json().get("data") is None - - -def assert_data(response): - assert response.status_code == 200 - data = response.json().get("data") - assert data is not None - return data - - def set_client_token(client, token): client.headers.update({"Authorization": "Bearer " + token}) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 2cb824f..f34f12a 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -4,6 +4,7 @@ import datetime from tests.common import generate_api_query, mnemonic_to_hex, read_json, write_json +from tests.test_graphql.common import assert_empty API_RECOVERY_QUERY = """ recoveryKey { @@ -21,8 +22,7 @@ def test_graphql_recovery_key_status_unauthorized(client, tokens_file): "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_file): @@ -354,8 +354,7 @@ def test_graphql_generate_recovery_key_with_invalid_time_format( }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) assert "recovery_token" not in read_json(tokens_file) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 4831692..38c40f1 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -3,6 +3,7 @@ import pytest from tests.common import read_json +from tests.test_graphql.common import 
assert_empty class ProcessMock: @@ -70,8 +71,7 @@ def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_po }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_popen): @@ -227,8 +227,7 @@ def test_graphql_remove_ssh_key_unauthorized(client, some_users, mock_subprocess }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_popen): diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index a021a16..5fdc06a 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -5,6 +5,7 @@ import os import pytest from tests.common import generate_system_query, read_json +from tests.test_graphql.common import assert_empty @pytest.fixture @@ -144,8 +145,7 @@ def test_graphql_get_python_version_wrong_auth( "query": generate_system_query([API_PYTHON_VERSION_INFO]), }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_get_python_version(authorized_client, mock_subprocess_check_output): @@ -181,8 +181,7 @@ def test_graphql_get_system_version_unauthorized( }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) assert mock_subprocess_check_output.call_count == 0 @@ -348,8 +347,7 @@ def test_graphql_get_timezone_unauthorized(client, turned_on): "query": generate_system_query([API_GET_TIMEZONE]), }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_get_timezone(authorized_client, turned_on): @@ -403,8 +401,7 @@ def test_graphql_change_timezone_unauthorized(client, turned_on): }, }, ) - assert response.status_code == 200 - assert 
response.json().get("data") is None + assert_empty(response) def test_graphql_change_timezone(authorized_client, turned_on): @@ -507,8 +504,7 @@ def test_graphql_get_auto_upgrade_unauthorized(client, turned_on): "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_get_auto_upgrade(authorized_client, turned_on): @@ -614,8 +610,7 @@ def test_graphql_change_auto_upgrade_unauthorized(client, turned_on): }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_change_auto_upgrade(authorized_client, turned_on): @@ -843,8 +838,7 @@ def test_graphql_pull_system_configuration_unauthorized(client, mock_subprocess_ }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) assert mock_subprocess_popen.call_count == 0 diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index 7a65736..503335d 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -6,6 +6,7 @@ from tests.common import ( generate_users_query, read_json, ) +from tests.test_graphql.common import assert_empty invalid_usernames = [ "messagebus", @@ -125,8 +126,7 @@ def test_graphql_get_users_unauthorized(client, some_users, mock_subprocess_pope "query": generate_users_query([API_USERS_INFO]), }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_get_some_users(authorized_client, some_users, mock_subprocess_popen): @@ -192,8 +192,7 @@ def test_graphql_get_one_user_unauthorized(client, one_user, mock_subprocess_pop }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen): @@ 
-321,8 +320,7 @@ def test_graphql_add_user_unauthorize(client, one_user, mock_subprocess_popen): }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): @@ -570,8 +568,7 @@ def test_graphql_delete_user_unauthorized(client, some_users, mock_subprocess_po "variables": {"username": "user1"}, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_popen): @@ -675,8 +672,7 @@ def test_graphql_update_user_unauthorized(client, some_users, mock_subprocess_po }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_update_user(authorized_client, some_users, mock_subprocess_popen): From 503c9c99effcbee13264616e87dd838b86a2a268 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 6 Jan 2023 10:34:52 +0000 Subject: [PATCH 045/246] test(tokens-repo): break out getting status --- tests/test_graphql/test_api_recovery.py | 37 +++++++++++++++---------- 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index f34f12a..9e12c0e 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -4,7 +4,7 @@ import datetime from tests.common import generate_api_query, mnemonic_to_hex, read_json, write_json -from tests.test_graphql.common import assert_empty +from tests.test_graphql.common import assert_empty, assert_data API_RECOVERY_QUERY = """ recoveryKey { @@ -17,27 +17,34 @@ recoveryKey { """ -def test_graphql_recovery_key_status_unauthorized(client, tokens_file): - response = client.post( +def request_recovery_status(client): + return client.post( "/graphql", json={"query": generate_api_query([API_RECOVERY_QUERY])}, 
) + + +def graphql_recovery_status(client): + response = request_recovery_status(client) + data = assert_data(response) + + status = data["api"]["recoveryKey"] + assert status is not None + return status + + +def test_graphql_recovery_key_status_unauthorized(client, tokens_file): + response = request_recovery_status(client) assert_empty(response) def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_file): - response = authorized_client.post( - "/graphql", - json={"query": generate_api_query([API_RECOVERY_QUERY])}, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["recoveryKey"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False - assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is None - assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None + status = graphql_recovery_status(authorized_client) + assert status["exists"] is False + assert status["valid"] is False + assert status["creationDate"] is None + assert status["expirationDate"] is None + assert status["usesLeft"] is None API_RECOVERY_KEY_GENERATE_MUTATION = """ From 851d90b30c6d0de73c37e44972b698f42d494f7d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 6 Jan 2023 10:48:59 +0000 Subject: [PATCH 046/246] test(tokens-repo): break out getting recovery key --- tests/test_graphql/test_api_recovery.py | 35 ++++++++++++------------- 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 9e12c0e..dd7d3e4 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -4,7 +4,7 @@ import datetime from tests.common import generate_api_query, 
mnemonic_to_hex, read_json, write_json -from tests.test_graphql.common import assert_empty, assert_data +from tests.test_graphql.common import assert_empty, assert_data, assert_ok API_RECOVERY_QUERY = """ recoveryKey { @@ -33,6 +33,20 @@ def graphql_recovery_status(client): return status +def graphql_get_new_recovery_key(client): + response = client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_GENERATE_MUTATION, + }, + ) + assert_ok(response, "getNewRecoveryApiKey") + key = response.json()["data"]["getNewRecoveryApiKey"]["key"] + assert key is not None + assert key.split(" ").__len__() == 18 + return key + + def test_graphql_recovery_key_status_unauthorized(client, tokens_file): response = request_recovery_status(client) assert_empty(response) @@ -71,26 +85,11 @@ mutation TestUseRecoveryKey($input: UseRecoveryKeyInput!) { def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_GENERATE_MUTATION, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None - assert ( - response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() - == 18 - ) + key = graphql_get_new_recovery_key(authorized_client) + assert read_json(tokens_file)["recovery_token"] is not None time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None - key = response.json()["data"]["getNewRecoveryApiKey"]["key"] assert ( datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=5) From 6cb9cc6d03d8d71d83c6f4bfdb138a406904b364 Mon Sep 17 00:00:00 2001 From: 
Houkime <> Date: Fri, 6 Jan 2023 10:59:59 +0000 Subject: [PATCH 047/246] test(tokens-repo): use assert recent --- tests/common.py | 8 ++++++++ tests/test_graphql/test_api_recovery.py | 15 ++++++++------- tests/test_rest_endpoints/test_auth.py | 9 +-------- 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/tests/common.py b/tests/common.py index 95488cc..a49885a 100644 --- a/tests/common.py +++ b/tests/common.py @@ -37,3 +37,11 @@ def generate_users_query(query_array): def mnemonic_to_hex(mnemonic): return Mnemonic(language="english").to_entropy(mnemonic).hex() + + +def assert_recovery_recent(time_generated): + assert ( + datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") + - datetime.timedelta(seconds=5) + < datetime.datetime.now() + ) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index dd7d3e4..2f97513 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -3,7 +3,13 @@ # pylint: disable=missing-function-docstring import datetime -from tests.common import generate_api_query, mnemonic_to_hex, read_json, write_json +from tests.common import ( + generate_api_query, + mnemonic_to_hex, + read_json, + write_json, + assert_recovery_recent, +) from tests.test_graphql.common import assert_empty, assert_data, assert_ok API_RECOVERY_QUERY = """ @@ -90,12 +96,7 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): assert read_json(tokens_file)["recovery_token"] is not None time_generated = read_json(tokens_file)["recovery_token"]["date"] assert time_generated is not None - assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - - datetime.timedelta(seconds=5) - < datetime.datetime.now() - ) - + assert_recovery_recent(time_generated) # Try to get token status response = authorized_client.post( "/graphql", diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py 
index 1632e22..ff161fb 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -9,6 +9,7 @@ from tests.common import ( RECOVERY_KEY_VALIDATION_DATETIME, DEVICE_KEY_VALIDATION_DATETIME, NearFuture, + assert_recovery_recent, ) DATE_FORMATS = [ @@ -90,14 +91,6 @@ def rest_get_recovery_date(client): return status["date"] -def assert_recovery_recent(time_generated): - assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - - datetime.timedelta(seconds=5) - < datetime.datetime.now() - ) - - def assert_no_recovery(client): assert not rest_get_recovery_status(client)["exists"] From 92b2a674799fc965ac6154ab428d89c3c7ecfbec Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 6 Jan 2023 11:08:53 +0000 Subject: [PATCH 048/246] test(tokens-repo): use get recovery status in test of recovery use --- tests/test_graphql/test_api_recovery.py | 25 ++++++------------------- 1 file changed, 6 insertions(+), 19 deletions(-) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 2f97513..aacc96c 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -93,25 +93,12 @@ mutation TestUseRecoveryKey($input: UseRecoveryKeyInput!) 
{ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): key = graphql_get_new_recovery_key(authorized_client) - assert read_json(tokens_file)["recovery_token"] is not None - time_generated = read_json(tokens_file)["recovery_token"]["date"] - assert time_generated is not None - assert_recovery_recent(time_generated) - # Try to get token status - response = authorized_client.post( - "/graphql", - json={"query": generate_api_query([API_RECOVERY_QUERY])}, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["recoveryKey"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json()["data"]["api"]["recoveryKey"][ - "creationDate" - ] == time_generated.replace("Z", "") - assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None + status = graphql_recovery_status(authorized_client) + assert status["exists"] is True + assert status["valid"] is True + assert_recovery_recent(status["creationDate"]) + assert status["expirationDate"] is None + assert status["usesLeft"] is None # Try to use token response = client.post( From 137ae58b421d5e8734a463aa66e43aead61dc11c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 6 Jan 2023 11:25:53 +0000 Subject: [PATCH 049/246] test(tokens-repo): break out using recovery key --- tests/test_graphql/test_api_recovery.py | 44 ++++++++++++------------- 1 file changed, 21 insertions(+), 23 deletions(-) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index aacc96c..20204df 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -53,6 +53,25 @@ def graphql_get_new_recovery_key(client): return key +def graphql_use_recovery_key(client, key, 
device_name): + response = client.post( + "/graphql", + json={ + "query": API_RECOVERY_KEY_USE_MUTATION, + "variables": { + "input": { + "key": key, + "deviceName": device_name, + }, + }, + }, + ) + assert_ok(response, "useRecoveryApiKey") + token = response.json()["data"]["useRecoveryApiKey"]["token"] + assert token is not None + return token + + def test_graphql_recovery_key_status_unauthorized(client, tokens_file): response = request_recovery_status(client) assert_empty(response) @@ -100,29 +119,8 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): assert status["expirationDate"] is None assert status["usesLeft"] is None - # Try to use token - response = client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_USE_MUTATION, - "variables": { - "input": { - "key": key, - "deviceName": "new_test_token", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None - assert ( - response.json()["data"]["useRecoveryApiKey"]["token"] - == read_json(tokens_file)["tokens"][2]["token"] - ) + token = graphql_use_recovery_key(client, key, "new_test_token") + assert token == read_json(tokens_file)["tokens"][2]["token"] assert read_json(tokens_file)["tokens"][2]["name"] == "new_test_token" # Try to use token again From de2703219141b8d86bbde8b6423eb7ce35237644 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 6 Jan 2023 11:46:17 +0000 Subject: [PATCH 050/246] test(tokens-repo): move token utils to graphql common --- tests/test_graphql/common.py | 36 ++++++++++++++++++++++++ tests/test_graphql/test_api_devices.py | 38 ++++---------------------- 2 files changed, 42 insertions(+), 32 deletions(-) diff --git 
a/tests/test_graphql/common.py b/tests/test_graphql/common.py index f2cc54d..7db5b35 100644 --- a/tests/test_graphql/common.py +++ b/tests/test_graphql/common.py @@ -1,3 +1,6 @@ +from tests.common import generate_api_query + + def assert_ok(response, request): data = assert_data(response) data[request]["success"] is True @@ -22,3 +25,36 @@ def assert_data(response): data = response.json().get("data") assert data is not None return data + + +API_DEVICES_QUERY = """ +devices { + creationDate + isCaller + name +} +""" + + +def request_devices(client): + return client.post( + "/graphql", + json={"query": generate_api_query([API_DEVICES_QUERY])}, + ) + + +def graphql_get_devices(client): + response = request_devices(client) + data = assert_data(response) + devices = data["api"]["devices"] + assert devices is not None + return devices + + +def set_client_token(client, token): + client.headers.update({"Authorization": "Bearer " + token}) + + +def assert_token_valid(client, token): + set_client_token(client, token) + assert graphql_get_devices(client) is not None diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 3db8647..673ed53 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -13,29 +13,15 @@ from tests.test_graphql.common import ( assert_empty, assert_ok, assert_errorcode, + assert_token_valid, + graphql_get_devices, + request_devices, + set_client_token, + API_DEVICES_QUERY, ) ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] -API_DEVICES_QUERY = """ -devices { - creationDate - isCaller - name -} -""" - - -def graphql_get_devices(client): - response = client.post( - "/graphql", - json={"query": generate_api_query([API_DEVICES_QUERY])}, - ) - data = assert_data(response) - devices = data["api"]["devices"] - assert devices is not None - return devices - def graphql_get_caller_token_info(client): devices = graphql_get_devices(client) @@ -65,15 +51,6 @@ def 
assert_original(client): assert device["isCaller"] is False -def set_client_token(client, token): - client.headers.update({"Authorization": "Bearer " + token}) - - -def assert_token_valid(client, token): - set_client_token(client, token) - assert graphql_get_devices(client) is not None - - def graphql_get_new_device_key(authorized_client) -> str: response = authorized_client.post( "/graphql", @@ -113,10 +90,7 @@ def test_graphql_tokens_info(authorized_client, tokens_file): def test_graphql_tokens_info_unauthorized(client, tokens_file): - response = client.post( - "/graphql", - json={"query": generate_api_query([API_DEVICES_QUERY])}, - ) + response = request_devices(client) assert_empty(response) From ce4fbdae0a90103b1c06906051885db90ce6df05 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 6 Jan 2023 11:57:51 +0000 Subject: [PATCH 051/246] test(tokens-repo): check for token existense in recovery tests --- tests/test_graphql/test_api_recovery.py | 45 +++++++++---------------- 1 file changed, 16 insertions(+), 29 deletions(-) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 20204df..04e4f6e 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -10,7 +10,14 @@ from tests.common import ( write_json, assert_recovery_recent, ) -from tests.test_graphql.common import assert_empty, assert_data, assert_ok +from tests.test_graphql.common import ( + assert_empty, + assert_data, + assert_ok, + assert_token_valid, + graphql_get_devices, + set_client_token, +) API_RECOVERY_QUERY = """ recoveryKey { @@ -69,6 +76,11 @@ def graphql_use_recovery_key(client, key, device_name): assert_ok(response, "useRecoveryApiKey") token = response.json()["data"]["useRecoveryApiKey"]["token"] assert token is not None + assert_token_valid(client, token) + set_client_token(client, token) + assert "new_test_token" in [ + device["name"] for device in graphql_get_devices(client) + ] return token @@ 
-119,34 +131,9 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): assert status["expirationDate"] is None assert status["usesLeft"] is None - token = graphql_use_recovery_key(client, key, "new_test_token") - assert token == read_json(tokens_file)["tokens"][2]["token"] - assert read_json(tokens_file)["tokens"][2]["name"] == "new_test_token" - - # Try to use token again - response = client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_USE_MUTATION, - "variables": { - "input": { - "key": key, - "deviceName": "new_test_token2", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None - assert ( - response.json()["data"]["useRecoveryApiKey"]["token"] - == read_json(tokens_file)["tokens"][3]["token"] - ) - assert read_json(tokens_file)["tokens"][3]["name"] == "new_test_token2" + graphql_use_recovery_key(client, key, "new_test_token") + # And again + graphql_use_recovery_key(client, key, "new_test_token2") def test_graphql_generate_recovery_key_with_expiration_date( From 18f5ff815c567770abbda736c9c3e34b8e8f7866 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 6 Jan 2023 13:09:54 +0000 Subject: [PATCH 052/246] test(tokens-repo): rework expiring recovery key tests --- tests/test_graphql/common.py | 24 +++ tests/test_graphql/test_api_devices.py | 26 +-- tests/test_graphql/test_api_recovery.py | 205 +++++++----------------- 3 files changed, 82 insertions(+), 173 deletions(-) diff --git a/tests/test_graphql/common.py b/tests/test_graphql/common.py index 7db5b35..03f48b7 100644 --- a/tests/test_graphql/common.py +++ b/tests/test_graphql/common.py @@ -1,4 +1,7 @@ from tests.common import 
generate_api_query +from tests.conftest import TOKENS_FILE_CONTENTS, DEVICE_WE_AUTH_TESTS_WITH + +ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] def assert_ok(response, request): @@ -58,3 +61,24 @@ def set_client_token(client, token): def assert_token_valid(client, token): set_client_token(client, token) assert graphql_get_devices(client) is not None + + +def assert_same(graphql_devices, abstract_devices): + """Orderless comparison""" + assert len(graphql_devices) == len(abstract_devices) + for original_device in abstract_devices: + assert original_device["name"] in [device["name"] for device in graphql_devices] + for device in graphql_devices: + if device["name"] == original_device["name"]: + assert device["creationDate"] == original_device["date"].isoformat() + + +def assert_original(client): + devices = graphql_get_devices(client) + assert_same(devices, ORIGINAL_DEVICES) + + for device in devices: + if device["name"] == DEVICE_WE_AUTH_TESTS_WITH["name"]: + assert device["isCaller"] is True + else: + assert device["isCaller"] is False diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 673ed53..b9dd808 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -14,14 +14,15 @@ from tests.test_graphql.common import ( assert_ok, assert_errorcode, assert_token_valid, + assert_original, + assert_same, graphql_get_devices, request_devices, set_client_token, API_DEVICES_QUERY, + ORIGINAL_DEVICES, ) -ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] - def graphql_get_caller_token_info(client): devices = graphql_get_devices(client) @@ -30,27 +31,6 @@ def graphql_get_caller_token_info(client): return device -def assert_same(graphql_devices, abstract_devices): - """Orderless comparison""" - assert len(graphql_devices) == len(abstract_devices) - for original_device in abstract_devices: - assert original_device["name"] in [device["name"] for device in graphql_devices] - for device in 
graphql_devices: - if device["name"] == original_device["name"]: - assert device["creationDate"] == original_device["date"].isoformat() - - -def assert_original(client): - devices = graphql_get_devices(client) - assert_same(devices, ORIGINAL_DEVICES) - - for device in devices: - if device["name"] == DEVICE_WE_AUTH_TESTS_WITH["name"]: - assert device["isCaller"] is True - else: - assert device["isCaller"] is False - - def graphql_get_new_device_key(authorized_client) -> str: response = authorized_client.post( "/graphql", diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 04e4f6e..47332aa 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -9,12 +9,16 @@ from tests.common import ( read_json, write_json, assert_recovery_recent, + NearFuture, + RECOVERY_KEY_VALIDATION_DATETIME, ) from tests.test_graphql.common import ( assert_empty, assert_data, assert_ok, + assert_errorcode, assert_token_valid, + assert_original, graphql_get_devices, set_client_token, ) @@ -46,13 +50,24 @@ def graphql_recovery_status(client): return status -def graphql_get_new_recovery_key(client): - response = client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_GENERATE_MUTATION, - }, - ) +def request_make_new_recovery_key(client, expires_at=None, uses=None): + json = {"query": API_RECOVERY_KEY_GENERATE_MUTATION} + limits = {} + + if expires_at is not None: + limits["expirationDate"] = expires_at.isoformat() + if uses is not None: + limits["uses"] = uses + + if limits != {}: + json["variables"] = {"limits": limits} + + response = client.post("/graphql", json=json) + return response + + +def graphql_make_new_recovery_key(client, expires_at=None, uses=None): + response = request_make_new_recovery_key(client, expires_at, uses) assert_ok(response, "getNewRecoveryApiKey") key = response.json()["data"]["getNewRecoveryApiKey"]["key"] assert key is not None @@ -60,8 +75,8 @@ def 
graphql_get_new_recovery_key(client): return key -def graphql_use_recovery_key(client, key, device_name): - response = client.post( +def request_recovery_auth(client, key, device_name): + return client.post( "/graphql", json={ "query": API_RECOVERY_KEY_USE_MUTATION, @@ -73,6 +88,10 @@ def graphql_use_recovery_key(client, key, device_name): }, }, ) + + +def graphql_use_recovery_key(client, key, device_name): + response = request_recovery_auth(client, key, device_name) assert_ok(response, "useRecoveryApiKey") token = response.json()["data"]["useRecoveryApiKey"]["token"] assert token is not None @@ -122,7 +141,7 @@ mutation TestUseRecoveryKey($input: UseRecoveryKeyInput!) { def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): - key = graphql_get_new_recovery_key(authorized_client) + key = graphql_make_new_recovery_key(authorized_client) status = graphql_recovery_status(authorized_client) assert status["exists"] is True @@ -140,154 +159,40 @@ def test_graphql_generate_recovery_key_with_expiration_date( client, authorized_client, tokens_file ): expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5) - expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f") - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_GENERATE_MUTATION, - "variables": { - "limits": { - "expirationDate": expiration_date_str, - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None - assert ( - response.json()["data"]["getNewRecoveryApiKey"]["key"].split(" ").__len__() - == 18 - ) - assert read_json(tokens_file)["recovery_token"] is not None + key = 
graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date) - key = response.json()["data"]["getNewRecoveryApiKey"]["key"] - assert read_json(tokens_file)["recovery_token"]["expiration"] == expiration_date_str - assert read_json(tokens_file)["recovery_token"]["token"] == mnemonic_to_hex(key) + status = graphql_recovery_status(authorized_client) + assert status["exists"] is True + assert status["valid"] is True + assert_recovery_recent(status["creationDate"]) + assert status["expirationDate"] == expiration_date.isoformat() + assert status["usesLeft"] is None - time_generated = read_json(tokens_file)["recovery_token"]["date"] - assert time_generated is not None - assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - - datetime.timedelta(seconds=5) - < datetime.datetime.now() - ) + graphql_use_recovery_key(client, key, "new_test_token") + # And again + graphql_use_recovery_key(client, key, "new_test_token2") - # Try to get token status - response = authorized_client.post( - "/graphql", - json={"query": generate_api_query([API_RECOVERY_QUERY])}, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["recoveryKey"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json()["data"]["api"]["recoveryKey"][ - "creationDate" - ] == time_generated.replace("Z", "") - assert ( - response.json()["data"]["api"]["recoveryKey"]["expirationDate"] - == expiration_date_str - ) - assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None - # Try to use token - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_USE_MUTATION, - "variables": { - "input": { - "key": key, - "deviceName": "new_test_token", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not 
None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None - assert ( - response.json()["data"]["useRecoveryApiKey"]["token"] - == read_json(tokens_file)["tokens"][2]["token"] - ) +def test_graphql_use_recovery_key_after_expiration( + client, authorized_client, tokens_file, mocker +): + expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5) + key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date) - # Try to use token again - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_USE_MUTATION, - "variables": { - "input": { - "key": key, - "deviceName": "new_test_token2", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None - assert ( - response.json()["data"]["useRecoveryApiKey"]["token"] - == read_json(tokens_file)["tokens"][3]["token"] - ) + # Timewarp to after it expires + mock = mocker.patch(RECOVERY_KEY_VALIDATION_DATETIME, NearFuture) - # Try to use token after expiration date - new_data = read_json(tokens_file) - new_data["recovery_token"]["expiration"] = ( - datetime.datetime.now() - datetime.timedelta(minutes=5) - ).strftime("%Y-%m-%dT%H:%M:%S.%f") - write_json(tokens_file, new_data) - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_USE_MUTATION, - "variables": { - "input": { - "key": key, - "deviceName": "new_test_token3", - }, - }, - }, - ) - assert 
response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is False - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 404 + response = request_recovery_auth(client, key, "new_test_token3") + assert_errorcode(response, "useRecoveryApiKey", 404) assert response.json()["data"]["useRecoveryApiKey"]["token"] is None + assert_original(authorized_client) - assert read_json(tokens_file)["tokens"] == new_data["tokens"] - - # Try to get token status - response = authorized_client.post( - "/graphql", - json={"query": generate_api_query([API_RECOVERY_QUERY])}, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["recoveryKey"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False - assert ( - response.json()["data"]["api"]["recoveryKey"]["creationDate"] == time_generated - ) - assert ( - response.json()["data"]["api"]["recoveryKey"]["expirationDate"] - == new_data["recovery_token"]["expiration"] - ) - assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None + status = graphql_recovery_status(authorized_client) + assert status["exists"] is True + assert status["valid"] is False + assert_recovery_recent(status["creationDate"]) + assert status["expirationDate"] == expiration_date.isoformat() + assert status["usesLeft"] is None def test_graphql_generate_recovery_key_with_expiration_in_the_past( From 2d6406c8c1eb9f2cd8ae70c10d9ac69e0437d275 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 9 Jan 2023 12:17:36 +0000 Subject: [PATCH 053/246] test(tokens-repo): rework recovery expiration in the past --- tests/test_graphql/test_api_recovery.py | 22 ++++------------------ 1 file changed, 4 insertions(+), 18 
deletions(-) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 47332aa..a02b582 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -199,26 +199,12 @@ def test_graphql_generate_recovery_key_with_expiration_in_the_past( authorized_client, tokens_file ): expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5) - expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f") - - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_GENERATE_MUTATION, - "variables": { - "limits": { - "expirationDate": expiration_date_str, - }, - }, - }, + response = request_make_new_recovery_key( + authorized_client, expires_at=expiration_date ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + + assert_errorcode(response, "getNewRecoveryApiKey", 400) assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None - assert "recovery_token" not in read_json(tokens_file) def test_graphql_generate_recovery_key_with_invalid_time_format( From 0b28fa2637ddd7f4ec043b9afcd762053d93a7d5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 9 Jan 2023 12:39:54 +0000 Subject: [PATCH 054/246] test(tokens-repo): rework limited uses test --- tests/test_graphql/test_api_recovery.py | 149 ++++-------------------- 1 file changed, 25 insertions(+), 124 deletions(-) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index a02b582..c6ccbf9 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -97,9 +97,7 @@ def graphql_use_recovery_key(client, key, device_name): assert token is not None 
assert_token_valid(client, token) set_client_token(client, token) - assert "new_test_token" in [ - device["name"] for device in graphql_get_devices(client) - ] + assert device_name in [device["name"] for device in graphql_get_devices(client)] return token @@ -230,135 +228,38 @@ def test_graphql_generate_recovery_key_with_invalid_time_format( def test_graphql_generate_recovery_key_with_limited_uses( - authorized_client, tokens_file + authorized_client, client, tokens_file ): - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_GENERATE_MUTATION, - "variables": { - "limits": { - "expirationDate": None, - "uses": 2, - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is True - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is not None + mnemonic_key = graphql_make_new_recovery_key(authorized_client, uses=2) - mnemonic_key = response.json()["data"]["getNewRecoveryApiKey"]["key"] - key = mnemonic_to_hex(mnemonic_key) + status = graphql_recovery_status(authorized_client) + assert status["exists"] is True + assert status["valid"] is True + assert status["creationDate"] is not None + assert status["expirationDate"] is None + assert status["usesLeft"] == 2 - assert read_json(tokens_file)["recovery_token"]["token"] == key - assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 + graphql_use_recovery_key(client, mnemonic_key, "new_test_token1") - # Try to get token status - response = authorized_client.post( - "/graphql", - json={"query": generate_api_query([API_RECOVERY_QUERY])}, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["recoveryKey"] is not None - assert 
response.json()["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 2 + status = graphql_recovery_status(authorized_client) + assert status["exists"] is True + assert status["valid"] is True + assert status["creationDate"] is not None + assert status["expirationDate"] is None + assert status["usesLeft"] == 1 - # Try to use token - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_USE_MUTATION, - "variables": { - "input": { - "key": mnemonic_key, - "deviceName": "test_token1", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None + graphql_use_recovery_key(client, mnemonic_key, "new_test_token2") - # Try to get token status - response = authorized_client.post( - "/graphql", - json={"query": generate_api_query([API_RECOVERY_QUERY])}, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["recoveryKey"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 1 + status = 
graphql_recovery_status(authorized_client) + assert status["exists"] is True + assert status["valid"] is False + assert status["creationDate"] is not None + assert status["expirationDate"] is None + assert status["usesLeft"] == 0 - # Try to use token - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_USE_MUTATION, - "variables": { - "input": { - "key": mnemonic_key, - "deviceName": "test_token2", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is not None - - # Try to get token status - response = authorized_client.post( - "/graphql", - json={"query": generate_api_query([API_RECOVERY_QUERY])}, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["recoveryKey"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 0 - - # Try to use token - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_USE_MUTATION, - "variables": { - "input": { - "key": mnemonic_key, - "deviceName": "test_token3", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["useRecoveryApiKey"]["success"] is False - assert response.json()["data"]["useRecoveryApiKey"]["message"] is not None - assert 
response.json()["data"]["useRecoveryApiKey"]["code"] == 404 - assert response.json()["data"]["useRecoveryApiKey"]["token"] is None + response = request_recovery_auth(client, mnemonic_key, "new_test_token3") + assert_errorcode(response, "useRecoveryApiKey", 404) def test_graphql_generate_recovery_key_with_negative_uses( From 72fdd412d9f7fa6c8cfd59d0e2b084de9c0e1230 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 9 Jan 2023 12:44:48 +0000 Subject: [PATCH 055/246] test(tokens-repo): complete the recovery test rework --- tests/test_graphql/test_api_recovery.py | 40 ++++--------------------- 1 file changed, 6 insertions(+), 34 deletions(-) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index c6ccbf9..d7ce667 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -265,42 +265,14 @@ def test_graphql_generate_recovery_key_with_limited_uses( def test_graphql_generate_recovery_key_with_negative_uses( authorized_client, tokens_file ): - # Try to get token status - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_GENERATE_MUTATION, - "variables": { - "limits": { - "uses": -1, - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + response = request_make_new_recovery_key(authorized_client, uses=-1) + + assert_errorcode(response, "getNewRecoveryApiKey", 400) assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): - # Try to get token status - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_GENERATE_MUTATION, - "variables": { - 
"limits": { - "uses": 0, - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["success"] is False - assert response.json()["data"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["getNewRecoveryApiKey"]["code"] == 400 + response = request_make_new_recovery_key(authorized_client, uses=0) + + assert_errorcode(response, "getNewRecoveryApiKey", 400) assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None From e5756a0dd1e0131d8e52183e49b50029aaf061cd Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 9 Jan 2023 12:54:10 +0000 Subject: [PATCH 056/246] test(tokens-repo): cleanup recovery tests --- tests/test_graphql/test_api_recovery.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index d7ce667..9d6e671 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -5,9 +5,6 @@ import datetime from tests.common import ( generate_api_query, - mnemonic_to_hex, - read_json, - write_json, assert_recovery_recent, NearFuture, RECOVERY_KEY_VALIDATION_DATETIME, @@ -203,6 +200,7 @@ def test_graphql_generate_recovery_key_with_expiration_in_the_past( assert_errorcode(response, "getNewRecoveryApiKey", 400) assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + assert graphql_recovery_status(authorized_client)["exists"] is False def test_graphql_generate_recovery_key_with_invalid_time_format( @@ -223,8 +221,7 @@ def test_graphql_generate_recovery_key_with_invalid_time_format( }, ) assert_empty(response) - - assert "recovery_token" not in read_json(tokens_file) + assert graphql_recovery_status(authorized_client)["exists"] is False def test_graphql_generate_recovery_key_with_limited_uses( @@ -276,3 +273,4 @@ def 
test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_ assert_errorcode(response, "getNewRecoveryApiKey", 400) assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + assert graphql_recovery_status(authorized_client)["exists"] is False From 9cc6e304c0ff91140f20cf147a37296152384410 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 9 Jan 2023 15:29:43 +0000 Subject: [PATCH 057/246] test(tokens-repo): remove device order dependence from graphql test__api --- tests/test_graphql/common.py | 4 ++++ tests/test_graphql/test_api.py | 20 ++++++-------------- 2 files changed, 10 insertions(+), 14 deletions(-) diff --git a/tests/test_graphql/common.py b/tests/test_graphql/common.py index 03f48b7..d473433 100644 --- a/tests/test_graphql/common.py +++ b/tests/test_graphql/common.py @@ -75,6 +75,10 @@ def assert_same(graphql_devices, abstract_devices): def assert_original(client): devices = graphql_get_devices(client) + assert_original_devices(devices) + + +def assert_original_devices(devices): assert_same(devices, ORIGINAL_DEVICES) for device in devices: diff --git a/tests/test_graphql/test_api.py b/tests/test_graphql/test_api.py index 695dd8e..c252d44 100644 --- a/tests/test_graphql/test_api.py +++ b/tests/test_graphql/test_api.py @@ -3,6 +3,7 @@ # pylint: disable=missing-function-docstring from tests.common import generate_api_query +from tests.test_graphql.common import assert_original_devices from tests.test_graphql.test_api_devices import API_DEVICES_QUERY from tests.test_graphql.test_api_recovery import API_RECOVERY_QUERY from tests.test_graphql.test_api_version import API_VERSION_QUERY @@ -20,20 +21,11 @@ def test_graphql_get_entire_api_data(authorized_client, tokens_file): assert response.status_code == 200 assert response.json().get("data") is not None assert "version" in response.json()["data"]["api"] - assert response.json()["data"]["api"]["devices"] is not None - assert len(response.json()["data"]["api"]["devices"]) == 2 - 
assert ( - response.json()["data"]["api"]["devices"][0]["creationDate"] - == "2022-01-14T08:31:10.789314" - ) - assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True - assert response.json()["data"]["api"]["devices"][0]["name"] == "test_token" - assert ( - response.json()["data"]["api"]["devices"][1]["creationDate"] - == "2022-01-14T08:31:10.789314" - ) - assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False - assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2" + + devices = response.json()["data"]["api"]["devices"] + assert devices is not None + assert_original_devices(devices) + assert response.json()["data"]["api"]["recoveryKey"] is not None assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False From 158c1f13a6425d726bfa0810d4f4bb58e6b7dc6a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 11 Jan 2023 17:02:01 +0000 Subject: [PATCH 058/246] refactor(tokens-repo): switch token backend to redis And use timezone-aware comparisons for expiry checks --- selfprivacy_api/actions/api_tokens.py | 20 +++++-- .../models/tokens/new_device_key.py | 10 ++-- selfprivacy_api/models/tokens/recovery_key.py | 10 +++- selfprivacy_api/models/tokens/time.py | 13 +++++ .../tokens/json_tokens_repository.py | 25 ++++++++- .../tokens/redis_tokens_repository.py | 6 +- tests/common.py | 22 +++++--- tests/conftest.py | 22 +++++--- tests/test_graphql/test_api_recovery.py | 12 ++-- .../test_repository/test_tokens_repository.py | 56 +++++++++---------- tests/test_rest_endpoints/test_auth.py | 10 ++-- 11 files changed, 136 insertions(+), 70 deletions(-) create mode 100644 selfprivacy_api/models/tokens/time.py diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 38133fd..2337224 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -1,11 +1,11 @@ """App 
tokens actions""" -from datetime import datetime +from datetime import datetime, timezone from typing import Optional from pydantic import BaseModel from mnemonic import Mnemonic -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, +from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( + RedisTokensRepository, ) from selfprivacy_api.repositories.tokens.exceptions import ( TokenNotFound, @@ -14,7 +14,7 @@ from selfprivacy_api.repositories.tokens.exceptions import ( NewDeviceKeyNotFound, ) -TOKEN_REPO = JsonTokensRepository() +TOKEN_REPO = RedisTokensRepository() class TokenInfoWithIsCaller(BaseModel): @@ -82,6 +82,14 @@ class RecoveryTokenStatus(BaseModel): uses_left: Optional[int] = None +def naive(date_time: datetime) -> datetime: + if date_time is None: + return None + if date_time.tzinfo is not None: + date_time.astimezone(timezone.utc) + return date_time.replace(tzinfo=None) + + def get_api_recovery_token_status() -> RecoveryTokenStatus: """Get the recovery token status""" token = TOKEN_REPO.get_recovery_key() @@ -91,8 +99,8 @@ def get_api_recovery_token_status() -> RecoveryTokenStatus: return RecoveryTokenStatus( exists=True, valid=is_valid, - date=token.created_at, - expiration=token.expires_at, + date=naive(token.created_at), + expiration=naive(token.expires_at), uses_left=token.uses_left, ) diff --git a/selfprivacy_api/models/tokens/new_device_key.py b/selfprivacy_api/models/tokens/new_device_key.py index dda926c..9fbd23b 100644 --- a/selfprivacy_api/models/tokens/new_device_key.py +++ b/selfprivacy_api/models/tokens/new_device_key.py @@ -1,11 +1,13 @@ """ New device key used to obtain access token. 
""" -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import secrets from pydantic import BaseModel from mnemonic import Mnemonic +from selfprivacy_api.models.tokens.time import is_past + class NewDeviceKey(BaseModel): """ @@ -22,7 +24,7 @@ class NewDeviceKey(BaseModel): """ Check if the recovery key is valid. """ - if self.expires_at < datetime.now(): + if is_past(self.expires_at): return False return True @@ -37,10 +39,10 @@ class NewDeviceKey(BaseModel): """ Factory to generate a random token. """ - creation_date = datetime.now() + creation_date = datetime.now(timezone.utc) key = secrets.token_bytes(16).hex() return NewDeviceKey( key=key, created_at=creation_date, - expires_at=datetime.now() + timedelta(minutes=10), + expires_at=creation_date + timedelta(minutes=10), ) diff --git a/selfprivacy_api/models/tokens/recovery_key.py b/selfprivacy_api/models/tokens/recovery_key.py index 098aceb..3b81398 100644 --- a/selfprivacy_api/models/tokens/recovery_key.py +++ b/selfprivacy_api/models/tokens/recovery_key.py @@ -3,12 +3,14 @@ Recovery key used to obtain access token. Recovery key has a token string, date of creation, optional date of expiration and optional count of uses left. """ -from datetime import datetime +from datetime import datetime, timezone import secrets from typing import Optional from pydantic import BaseModel from mnemonic import Mnemonic +from selfprivacy_api.models.tokens.time import is_past, ensure_timezone + class RecoveryKey(BaseModel): """ @@ -26,7 +28,7 @@ class RecoveryKey(BaseModel): """ Check if the recovery key is valid. """ - if self.expires_at is not None and self.expires_at < datetime.now(): + if self.expires_at is not None and is_past(self.expires_at): return False if self.uses_left is not None and self.uses_left <= 0: return False @@ -46,7 +48,9 @@ class RecoveryKey(BaseModel): """ Factory to generate a random token. 
""" - creation_date = datetime.now() + creation_date = datetime.now(timezone.utc) + if expiration is not None: + expiration = ensure_timezone(expiration) key = secrets.token_bytes(24).hex() return RecoveryKey( key=key, diff --git a/selfprivacy_api/models/tokens/time.py b/selfprivacy_api/models/tokens/time.py new file mode 100644 index 0000000..35fd992 --- /dev/null +++ b/selfprivacy_api/models/tokens/time.py @@ -0,0 +1,13 @@ +from datetime import datetime, timezone + +def is_past(dt: datetime) -> bool: + # we cannot compare a naive now() + # to dt which might be tz-aware or unaware + dt = ensure_timezone(dt) + return dt < datetime.now(timezone.utc) + +def ensure_timezone(dt:datetime) -> datetime: + if dt.tzinfo is None or dt.tzinfo.utcoffset(None) is None: + dt = dt.replace(tzinfo= timezone.utc) + return dt + diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 77e1311..09204a8 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -2,7 +2,7 @@ temporary legacy """ from typing import Optional -from datetime import datetime +from datetime import datetime, timezone from selfprivacy_api.utils import UserDataFiles, WriteUserData, ReadUserData from selfprivacy_api.models.tokens.token import Token @@ -15,6 +15,7 @@ from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, ) + DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" @@ -56,6 +57,20 @@ class JsonTokensRepository(AbstractTokensRepository): raise TokenNotFound("Token not found!") + def __key_date_from_str(self, date_string: str) -> datetime: + if date_string is None or date_string == "": + return None + # we assume that we store dates in json as naive utc + utc_no_tz = datetime.fromisoformat(date_string) + utc_with_tz = utc_no_tz.replace(tzinfo=timezone.utc) + return utc_with_tz + + def 
__date_from_tokens_file( + self, tokens_file: object, tokenfield: str, datefield: str + ): + date_string = tokens_file[tokenfield].get(datefield) + return self.__key_date_from_str(date_string) + def get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" with ReadUserData(UserDataFiles.TOKENS) as tokens_file: @@ -68,8 +83,12 @@ class JsonTokensRepository(AbstractTokensRepository): recovery_key = RecoveryKey( key=tokens_file["recovery_token"].get("token"), - created_at=tokens_file["recovery_token"].get("date"), - expires_at=tokens_file["recovery_token"].get("expiration"), + created_at=self.__date_from_tokens_file( + tokens_file, "recovery_token", "date" + ), + expires_at=self.__date_from_tokens_file( + tokens_file, "recovery_token", "expiration" + ), uses_left=tokens_file["recovery_token"].get("uses_left"), ) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index c72e231..a16b79d 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -2,7 +2,7 @@ Token repository using Redis as backend. 
""" from typing import Optional -from datetime import datetime +from datetime import datetime, timezone from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, @@ -38,6 +38,8 @@ class RedisTokensRepository(AbstractTokensRepository): for key in token_keys: token = self._token_from_hash(key) if token is not None: + # token creation dates are temporarily not tz-aware + token.created_at = token.created_at.replace(tzinfo=None) tokens.append(token) return tokens @@ -150,5 +152,7 @@ class RedisTokensRepository(AbstractTokensRepository): redis = self.connection for key, value in model.dict().items(): if isinstance(value, datetime): + if value.tzinfo is None: + value = value.replace(tzinfo=timezone.utc) value = value.isoformat() redis.hset(redis_key, key, str(value)) diff --git a/tests/common.py b/tests/common.py index a49885a..08ddc66 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1,16 +1,21 @@ import json -import datetime +from datetime import datetime, timezone, timedelta from mnemonic import Mnemonic # for expiration tests. 
If headache, consider freezegun -RECOVERY_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.recovery_key.datetime" -DEVICE_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.new_device_key.datetime" +RECOVERY_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.time.datetime" +DEVICE_KEY_VALIDATION_DATETIME = RECOVERY_KEY_VALIDATION_DATETIME + +FIVE_MINUTES_INTO_FUTURE_NAIVE = datetime.now() + timedelta(minutes=5) +FIVE_MINUTES_INTO_FUTURE = datetime.now(timezone.utc) + timedelta(minutes=5) +FIVE_MINUTES_INTO_PAST_NAIVE = datetime.now() - timedelta(minutes=5) +FIVE_MINUTES_INTO_PAST = datetime.now(timezone.utc) - timedelta(minutes=5) -class NearFuture(datetime.datetime): +class NearFuture(datetime): @classmethod - def now(cls): - return datetime.datetime.now() + datetime.timedelta(minutes=13) + def now(cls, tz=None): + return datetime.now(tz) + timedelta(minutes=13) def read_json(file_path): @@ -41,7 +46,6 @@ def mnemonic_to_hex(mnemonic): def assert_recovery_recent(time_generated): assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - - datetime.timedelta(seconds=5) - < datetime.datetime.now() + datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - timedelta(seconds=5) + < datetime.now() ) diff --git a/tests/conftest.py b/tests/conftest.py index 212b6da..52ded90 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,6 +11,9 @@ from selfprivacy_api.models.tokens.token import Token from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, ) +from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( + RedisTokensRepository, +) from tests.common import read_json @@ -63,21 +66,26 @@ def empty_json_repo(empty_tokens): @pytest.fixture -def tokens_file(empty_json_repo, tmpdir): +def empty_redis_repo(): + repo = RedisTokensRepository() + repo.reset() + assert repo.get_tokens() == [] + return repo + + +@pytest.fixture +def tokens_file(empty_redis_repo, tmpdir): """A 
state with tokens""" + repo = empty_redis_repo for token in TOKENS_FILE_CONTENTS["tokens"]: - empty_json_repo._store_token( + repo._store_token( Token( token=token["token"], device_name=token["name"], created_at=token["date"], ) ) - # temporary return for compatibility with older tests - - tokenfile = tmpdir / "empty_tokens.json" - assert path.exists(tokenfile) - return tokenfile + return repo @pytest.fixture diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 9d6e671..a19eae2 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -1,7 +1,6 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=missing-function-docstring -import datetime from tests.common import ( generate_api_query, @@ -9,6 +8,11 @@ from tests.common import ( NearFuture, RECOVERY_KEY_VALIDATION_DATETIME, ) + +# Graphql API's output should be timezone-naive +from tests.common import FIVE_MINUTES_INTO_FUTURE_NAIVE as FIVE_MINUTES_INTO_FUTURE +from tests.common import FIVE_MINUTES_INTO_PAST_NAIVE as FIVE_MINUTES_INTO_PAST + from tests.test_graphql.common import ( assert_empty, assert_data, @@ -153,7 +157,7 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): def test_graphql_generate_recovery_key_with_expiration_date( client, authorized_client, tokens_file ): - expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5) + expiration_date = FIVE_MINUTES_INTO_FUTURE key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date) status = graphql_recovery_status(authorized_client) @@ -171,7 +175,7 @@ def test_graphql_generate_recovery_key_with_expiration_date( def test_graphql_use_recovery_key_after_expiration( client, authorized_client, tokens_file, mocker ): - expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5) + expiration_date = FIVE_MINUTES_INTO_FUTURE key = 
graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date) # Timewarp to after it expires @@ -193,7 +197,7 @@ def test_graphql_use_recovery_key_after_expiration( def test_graphql_generate_recovery_key_with_expiration_in_the_past( authorized_client, tokens_file ): - expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5) + expiration_date = FIVE_MINUTES_INTO_PAST response = request_make_new_recovery_key( authorized_client, expires_at=expiration_date ) diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index a2dbb7a..7eede6a 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -2,7 +2,7 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring -from datetime import datetime, timedelta +from datetime import datetime, timezone from mnemonic import Mnemonic import pytest @@ -16,9 +16,8 @@ from selfprivacy_api.repositories.tokens.exceptions import ( TokenNotFound, NewDeviceKeyNotFound, ) -from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( - RedisTokensRepository, -) + +from tests.common import FIVE_MINUTES_INTO_PAST ORIGINAL_DEVICE_NAMES = [ @@ -28,6 +27,10 @@ ORIGINAL_DEVICE_NAMES = [ "forth_token", ] +TEST_DATE = datetime(2022, 7, 15, 17, 41, 31, 675698, timezone.utc) +# tokens are not tz-aware +TOKEN_TEST_DATE = datetime(2022, 7, 15, 17, 41, 31, 675698) + def mnemonic_from_hex(hexkey): return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey)) @@ -40,8 +43,8 @@ def mock_new_device_key_generate(mocker): autospec=True, return_value=NewDeviceKey( key="43478d05b35e4781598acd76e33832bb", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, + expires_at=TEST_DATE, ), ) return mock @@ -55,8 +58,8 @@ def 
mock_new_device_key_generate_for_mnemonic(mocker): autospec=True, return_value=NewDeviceKey( key="2237238de23dc71ab558e317bdb8ff8e", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, + expires_at=TEST_DATE, ), ) return mock @@ -83,7 +86,7 @@ def mock_recovery_key_generate_invalid(mocker): autospec=True, return_value=RecoveryKey( key="889bf49c1d3199d71a2e704718772bd53a422020334db051", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, expires_at=None, uses_left=0, ), @@ -99,7 +102,7 @@ def mock_token_generate(mocker): return_value=Token( token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", device_name="IamNewDevice", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TOKEN_TEST_DATE, ), ) return mock @@ -112,7 +115,7 @@ def mock_recovery_key_generate(mocker): autospec=True, return_value=RecoveryKey( key="889bf49c1d3199d71a2e704718772bd53a422020334db051", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, expires_at=None, uses_left=1, ), @@ -120,14 +123,6 @@ def mock_recovery_key_generate(mocker): return mock -@pytest.fixture -def empty_redis_repo(): - repo = RedisTokensRepository() - repo.reset() - assert repo.get_tokens() == [] - return repo - - @pytest.fixture(params=["json", "redis"]) def empty_repo(request, empty_json_repo, empty_redis_repo): if request.param == "json": @@ -224,13 +219,13 @@ def test_create_token(empty_repo, mock_token_generate): assert repo.create_token(device_name="IamNewDevice") == Token( token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", device_name="IamNewDevice", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TOKEN_TEST_DATE, ) assert repo.get_tokens() == [ Token( token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", device_name="IamNewDevice", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TOKEN_TEST_DATE, ) ] @@ -266,7 +261,7 @@ def 
test_delete_not_found_token(some_tokens_repo): input_token = Token( token="imbadtoken", device_name="primary_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, ) with pytest.raises(TokenNotFound): assert repo.delete_token(input_token) is None @@ -295,7 +290,7 @@ def test_refresh_not_found_token(some_tokens_repo, mock_token_generate): input_token = Token( token="idontknowwhoiam", device_name="tellmewhoiam?", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, ) with pytest.raises(TokenNotFound): @@ -319,7 +314,7 @@ def test_create_get_recovery_key(some_tokens_repo, mock_recovery_key_generate): assert repo.create_recovery_key(uses_left=1, expiration=None) is not None assert repo.get_recovery_key() == RecoveryKey( key="889bf49c1d3199d71a2e704718772bd53a422020334db051", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, expires_at=None, uses_left=1, ) @@ -358,10 +353,13 @@ def test_use_mnemonic_expired_recovery_key( some_tokens_repo, ): repo = some_tokens_repo - expiration = datetime.now() - timedelta(minutes=5) + expiration = FIVE_MINUTES_INTO_PAST assert repo.create_recovery_key(uses_left=2, expiration=expiration) is not None recovery_key = repo.get_recovery_key() - assert recovery_key.expires_at == expiration + # TODO: do not ignore timezone once json backend is deleted + assert recovery_key.expires_at.replace(tzinfo=None) == expiration.replace( + tzinfo=None + ) assert not repo.is_recovery_key_valid() with pytest.raises(RecoveryKeyNotFound): @@ -458,8 +456,8 @@ def test_get_new_device_key(some_tokens_repo, mock_new_device_key_generate): assert repo.get_new_device_key() == NewDeviceKey( key="43478d05b35e4781598acd76e33832bb", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, + expires_at=TEST_DATE, ) @@ -535,7 +533,7 @@ def test_use_mnemonic_expired_new_device_key( some_tokens_repo, ): 
repo = some_tokens_repo - expiration = datetime.now() - timedelta(minutes=5) + expiration = FIVE_MINUTES_INTO_PAST key = repo.get_new_device_key() assert key is not None diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index ff161fb..ba54745 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -11,6 +11,8 @@ from tests.common import ( NearFuture, assert_recovery_recent, ) +from tests.common import FIVE_MINUTES_INTO_FUTURE_NAIVE as FIVE_MINUTES_INTO_FUTURE +from tests.common import FIVE_MINUTES_INTO_PAST_NAIVE as FIVE_MINUTES_INTO_PAST DATE_FORMATS = [ "%Y-%m-%dT%H:%M:%S.%fZ", @@ -110,7 +112,7 @@ def rest_recover_with_mnemonic(client, mnemonic_token, device_name): def test_get_tokens_info(authorized_client, tokens_file): - assert rest_get_tokens_info(authorized_client) == [ + assert sorted(rest_get_tokens_info(authorized_client), key=lambda x: x["name"]) == [ {"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True}, { "name": "test_token2", @@ -321,7 +323,7 @@ def test_generate_recovery_token_with_expiration_date( ): # Generate token with expiration date # Generate expiration date in the future - expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5) + expiration_date = FIVE_MINUTES_INTO_FUTURE mnemonic_token = rest_make_recovery_token( authorized_client, expires_at=expiration_date, timeformat=timeformat ) @@ -333,7 +335,7 @@ def test_generate_recovery_token_with_expiration_date( "exists": True, "valid": True, "date": time_generated, - "expiration": expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f"), + "expiration": expiration_date.isoformat(), "uses_left": None, } @@ -360,7 +362,7 @@ def test_generate_recovery_token_with_expiration_in_the_past( authorized_client, tokens_file, timeformat ): # Server must return 400 if expiration date is in the past - expiration_date = datetime.datetime.utcnow() - datetime.timedelta(minutes=5) + 
expiration_date = FIVE_MINUTES_INTO_PAST expiration_date_str = expiration_date.strftime(timeformat) response = authorized_client.post( "/auth/recovery_token", From 51018dd6c2e8b71071d17cffb4b78dbb3966cb18 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 11 Jan 2023 17:18:23 +0000 Subject: [PATCH 059/246] refactor(tokens-repo): cleanup actions/api_tokens.py --- selfprivacy_api/actions/api_tokens.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 2337224..520c875 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -1,4 +1,7 @@ -"""App tokens actions""" +""" +App tokens actions. +The only actions on tokens that are accessible from APIs +""" from datetime import datetime, timezone from typing import Optional from pydantic import BaseModel @@ -24,6 +27,12 @@ class TokenInfoWithIsCaller(BaseModel): date: datetime is_caller: bool +def _naive(date_time: datetime) -> datetime: + if date_time is None: + return None + if date_time.tzinfo is not None: + date_time.astimezone(timezone.utc) + return date_time.replace(tzinfo=None) def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCaller]: """Get the tokens info""" @@ -82,12 +91,7 @@ class RecoveryTokenStatus(BaseModel): uses_left: Optional[int] = None -def naive(date_time: datetime) -> datetime: - if date_time is None: - return None - if date_time.tzinfo is not None: - date_time.astimezone(timezone.utc) - return date_time.replace(tzinfo=None) + def get_api_recovery_token_status() -> RecoveryTokenStatus: @@ -99,8 +103,8 @@ def get_api_recovery_token_status() -> RecoveryTokenStatus: return RecoveryTokenStatus( exists=True, valid=is_valid, - date=naive(token.created_at), - expiration=naive(token.expires_at), + date=_naive(token.created_at), + expiration=_naive(token.expires_at), uses_left=token.uses_left, ) From 
baf72b730b406483477202911e9f56c0cbfe0907 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 13 Jan 2023 09:59:27 +0000 Subject: [PATCH 060/246] refactor(tokens-repo): move reset to AbstractTokensRepo --- .../tokens/abstract_tokens_repository.py | 10 ++++++++++ .../repositories/tokens/json_tokens_repository.py | 7 +++++++ .../repositories/tokens/redis_tokens_repository.py | 12 +++++------- 3 files changed, 22 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 3a20ede..d9a250e 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -123,6 +123,10 @@ class AbstractTokensRepository(ABC): return False return recovery_key.is_valid() + @abstractmethod + def _delete_recovery_key(self) -> None: + """Delete the recovery key""" + def get_new_device_key(self) -> NewDeviceKey: """Creates and returns the new device key""" new_device_key = NewDeviceKey.generate() @@ -156,6 +160,12 @@ class AbstractTokensRepository(ABC): return new_token + def reset(self): + for token in self.get_tokens(): + self.delete_token(token) + self.delete_new_device_key() + self._delete_recovery_key() + @abstractmethod def _store_token(self, new_token: Token): """Store a token directly""" diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 09204a8..4cb7b3f 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -123,6 +123,13 @@ class JsonTokensRepository(AbstractTokensRepository): if tokens["recovery_token"]["uses_left"] is not None: tokens["recovery_token"]["uses_left"] -= 1 + def _delete_recovery_key(self) -> None: + """Delete the recovery key""" + with WriteUserData(UserDataFiles.TOKENS) as 
tokens_file: + if "recovery_token" in tokens_file: + del tokens_file["recovery_token"] + return + def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None: with WriteUserData(UserDataFiles.TOKENS) as tokens_file: tokens_file["new_device"] = { diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index a16b79d..27271b7 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -51,13 +51,6 @@ class RedisTokensRepository(AbstractTokensRepository): raise TokenNotFound redis.delete(key) - def reset(self): - for token in self.get_tokens(): - self.delete_token(token) - self.delete_new_device_key() - redis = self.connection - redis.delete(RECOVERY_KEY_REDIS_KEY) - def get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" redis = self.connection @@ -75,6 +68,11 @@ class RedisTokensRepository(AbstractTokensRepository): self._store_model_as_hash(RECOVERY_KEY_REDIS_KEY, recovery_key) return recovery_key + def _delete_recovery_key(self) -> None: + """Delete the recovery key""" + redis = self.connection + redis.delete(RECOVERY_KEY_REDIS_KEY) + def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None: """Store new device key directly""" self._store_model_as_hash(NEW_DEVICE_KEY_REDIS_KEY, new_device_key) From 817f414dd95995b339070850101d395d3a761819 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 13 Jan 2023 10:24:17 +0000 Subject: [PATCH 061/246] refactor(tokens-repo): break out storing recovery keys --- .../repositories/tokens/abstract_tokens_repository.py | 4 ++++ .../repositories/tokens/json_tokens_repository.py | 7 +++++-- .../repositories/tokens/redis_tokens_repository.py | 5 ++++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py 
b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index d9a250e..764f3b6 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -123,6 +123,10 @@ class AbstractTokensRepository(ABC): return False return recovery_key.is_valid() + @abstractmethod + def _store_recovery_key(self, recovery_key: RecoveryKey) -> None: + """Store recovery key directly""" + @abstractmethod def _delete_recovery_key(self) -> None: """Delete the recovery key""" diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 4cb7b3f..332bef8 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -103,6 +103,11 @@ class JsonTokensRepository(AbstractTokensRepository): recovery_key = RecoveryKey.generate(expiration, uses_left) + self._store_recovery_key(recovery_key) + + return recovery_key + + def _store_recovery_key(self, recovery_key: RecoveryKey) -> None: with WriteUserData(UserDataFiles.TOKENS) as tokens_file: key_expiration: Optional[str] = None if recovery_key.expires_at is not None: @@ -114,8 +119,6 @@ class JsonTokensRepository(AbstractTokensRepository): "uses_left": recovery_key.uses_left, } - return recovery_key - def _decrement_recovery_token(self): """Decrement recovery key use count by one""" if self.is_recovery_key_valid(): diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 27271b7..0b3c19b 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -65,9 +65,12 @@ class RedisTokensRepository(AbstractTokensRepository): ) -> RecoveryKey: """Create the recovery key""" recovery_key = 
RecoveryKey.generate(expiration=expiration, uses_left=uses_left) - self._store_model_as_hash(RECOVERY_KEY_REDIS_KEY, recovery_key) + self._store_recovery_key(recovery_key) return recovery_key + def _store_recovery_key(self, recovery_key: RecoveryKey) -> None: + self._store_model_as_hash(RECOVERY_KEY_REDIS_KEY, recovery_key) + def _delete_recovery_key(self) -> None: """Delete the recovery key""" redis = self.connection From da19cc8c0ed881b83592ad588d908c70350e75d4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 13 Jan 2023 10:30:04 +0000 Subject: [PATCH 062/246] refactor(tokens-repo): move create recovery key to abstract token repository --- .../tokens/abstract_tokens_repository.py | 4 +++- .../repositories/tokens/json_tokens_repository.py | 13 ------------- .../repositories/tokens/redis_tokens_repository.py | 10 ---------- 3 files changed, 3 insertions(+), 24 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 764f3b6..e5daa4d 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -86,13 +86,15 @@ class AbstractTokensRepository(ABC): def get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" - @abstractmethod def create_recovery_key( self, expiration: Optional[datetime], uses_left: Optional[int], ) -> RecoveryKey: """Create the recovery key""" + recovery_key = RecoveryKey.generate(expiration, uses_left) + self._store_recovery_key(recovery_key) + return recovery_key def use_mnemonic_recovery_key( self, mnemonic_phrase: str, device_name: str diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 332bef8..0f70a55 100644 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ 
b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -94,19 +94,6 @@ class JsonTokensRepository(AbstractTokensRepository): return recovery_key - def create_recovery_key( - self, - expiration: Optional[datetime], - uses_left: Optional[int], - ) -> RecoveryKey: - """Create the recovery key""" - - recovery_key = RecoveryKey.generate(expiration, uses_left) - - self._store_recovery_key(recovery_key) - - return recovery_key - def _store_recovery_key(self, recovery_key: RecoveryKey) -> None: with WriteUserData(UserDataFiles.TOKENS) as tokens_file: key_expiration: Optional[str] = None diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 0b3c19b..8e8dfe5 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -58,16 +58,6 @@ class RedisTokensRepository(AbstractTokensRepository): return self._recovery_key_from_hash(RECOVERY_KEY_REDIS_KEY) return None - def create_recovery_key( - self, - expiration: Optional[datetime], - uses_left: Optional[int], - ) -> RecoveryKey: - """Create the recovery key""" - recovery_key = RecoveryKey.generate(expiration=expiration, uses_left=uses_left) - self._store_recovery_key(recovery_key) - return recovery_key - def _store_recovery_key(self, recovery_key: RecoveryKey) -> None: self._store_model_as_hash(RECOVERY_KEY_REDIS_KEY, recovery_key) From d0a17d7b7a70c74aee1f9e4764d2c701a3a987ca Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 13 Jan 2023 11:37:41 +0000 Subject: [PATCH 063/246] fix(tokens-repo): make json _get_stored_new_device_key return tz-aware keys --- .../repositories/tokens/json_tokens_repository.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py index 0f70a55..be753ea 100644 --- 
a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/json_tokens_repository.py @@ -143,7 +143,11 @@ class JsonTokensRepository(AbstractTokensRepository): new_device_key = NewDeviceKey( key=tokens_file["new_device"]["token"], - created_at=tokens_file["new_device"]["date"], - expires_at=tokens_file["new_device"]["expiration"], + created_at=self.__date_from_tokens_file( + tokens_file, "new_device", "date" + ), + expires_at=self.__date_from_tokens_file( + tokens_file, "new_device", "expiration" + ), ) return new_device_key From 5fbfaa73ea91fb179c8ba87fee65635d90e7819a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 13 Jan 2023 11:41:17 +0000 Subject: [PATCH 064/246] feat(tokens-repo): add clone() method --- .../tokens/abstract_tokens_repository.py | 16 +++++++ .../test_repository/test_tokens_repository.py | 45 ++++++++++++++++++- 2 files changed, 60 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index e5daa4d..d81bd65 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from abc import ABC, abstractmethod from datetime import datetime from typing import Optional @@ -172,6 +174,20 @@ class AbstractTokensRepository(ABC): self.delete_new_device_key() self._delete_recovery_key() + def clone(self, source: AbstractTokensRepository) -> None: + """Clone the state of another repository to this one""" + self.reset() + for token in source.get_tokens(): + self._store_token(token) + + recovery_key = source.get_recovery_key() + if recovery_key is not None: + self._store_recovery_key(recovery_key) + + new_device_key = source._get_stored_new_device_key() + if new_device_key is not None: + self._store_new_device_key(new_device_key) + @abstractmethod def 
_store_token(self, new_token: Token): """Store a token directly""" diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 7eede6a..360bfa5 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -17,7 +17,17 @@ from selfprivacy_api.repositories.tokens.exceptions import ( NewDeviceKeyNotFound, ) -from tests.common import FIVE_MINUTES_INTO_PAST +from selfprivacy_api.repositories.tokens.json_tokens_repository import ( + JsonTokensRepository, +) +from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( + RedisTokensRepository, +) +from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( + AbstractTokensRepository, +) + +from tests.common import FIVE_MINUTES_INTO_PAST, FIVE_MINUTES_INTO_FUTURE ORIGINAL_DEVICE_NAMES = [ @@ -560,3 +570,36 @@ def test_use_mnemonic_new_device_key_when_empty(empty_repo): ) is None ) + + +def assert_identical( + repo_a: AbstractTokensRepository, repo_b: AbstractTokensRepository +): + tokens_a = repo_a.get_tokens() + tokens_b = repo_b.get_tokens() + assert len(tokens_a) == len(tokens_b) + for token in tokens_a: + assert token in tokens_b + assert repo_a.get_recovery_key() == repo_b.get_recovery_key() + assert repo_a._get_stored_new_device_key() == repo_b._get_stored_new_device_key() + + +def clone_to_redis(repo: JsonTokensRepository): + other_repo = RedisTokensRepository() + other_repo.clone(repo) + assert_identical(repo, other_repo) + + +# we cannot easily parametrize this unfortunately, since some_tokens and empty_repo cannot coexist +def test_clone_json_to_redis_empty(empty_repo): + repo = empty_repo + if isinstance(repo, JsonTokensRepository): + clone_to_redis(repo) + + +def test_clone_json_to_redis_full(some_tokens_repo): + repo = some_tokens_repo + if isinstance(repo, JsonTokensRepository): + repo.get_new_device_key() + 
repo.create_recovery_key(FIVE_MINUTES_INTO_FUTURE, 2) + clone_to_redis(repo) From 3344ab7c5dbacbc4647b5631fc9e3d208dc79d1a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 13 Jan 2023 12:13:20 +0000 Subject: [PATCH 065/246] feat(tokens-repo): add migration of tokens to redis --- selfprivacy_api/migrations/__init__.py | 2 + selfprivacy_api/migrations/redis_tokens.py | 48 ++++++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 selfprivacy_api/migrations/redis_tokens.py diff --git a/selfprivacy_api/migrations/__init__.py b/selfprivacy_api/migrations/__init__.py index adb7d24..222c95e 100644 --- a/selfprivacy_api/migrations/__init__.py +++ b/selfprivacy_api/migrations/__init__.py @@ -22,6 +22,7 @@ from selfprivacy_api.migrations.providers import CreateProviderFields from selfprivacy_api.migrations.prepare_for_nixos_2211 import ( MigrateToSelfprivacyChannelFrom2205, ) +from selfprivacy_api.migrations.redis_tokens import LoadTokensToRedis migrations = [ FixNixosConfigBranch(), @@ -31,6 +32,7 @@ migrations = [ CheckForFailedBindsMigration(), CreateProviderFields(), MigrateToSelfprivacyChannelFrom2205(), + LoadTokensToRedis(), ] diff --git a/selfprivacy_api/migrations/redis_tokens.py b/selfprivacy_api/migrations/redis_tokens.py new file mode 100644 index 0000000..c5eea2f --- /dev/null +++ b/selfprivacy_api/migrations/redis_tokens.py @@ -0,0 +1,48 @@ +from selfprivacy_api.migrations.migration import Migration + +from selfprivacy_api.repositories.tokens.json_tokens_repository import ( + JsonTokensRepository, +) +from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( + RedisTokensRepository, +) +from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( + AbstractTokensRepository, +) + + +class LoadTokensToRedis(Migration): + """Load Json tokens into Redis""" + + def get_migration_name(self): + return "load_tokens_to_redis" + + def get_migration_description(self): + return "Loads access tokens and recovery keys from 
legacy json file into redis token storage" + + def is_repo_empty(self, repo: AbstractTokensRepository) -> bool: + if repo.get_tokens() != []: + return False + if repo.get_recovery_key() is not None: + return False + return True + + def is_migration_needed(self): + try: + if not self.is_repo_empty(JsonTokensRepository()) and self.is_repo_empty( + RedisTokensRepository() + ): + return True + except Exception as e: + print(e) + return False + + def migrate(self): + # Write info about providers to userdata.json + try: + RedisTokensRepository().clone(JsonTokensRepository()) + + print("Done") + except Exception as e: + print(e) + print("Error migrating access tokens from json to redis") From c77191864e0f018320e09100244b96a742319c33 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 14 Jun 2023 14:01:15 +0300 Subject: [PATCH 066/246] style: reformat --- selfprivacy_api/actions/api_tokens.py | 5 ++--- selfprivacy_api/models/tokens/time.py | 9 +++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 520c875..37b7631 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -27,6 +27,7 @@ class TokenInfoWithIsCaller(BaseModel): date: datetime is_caller: bool + def _naive(date_time: datetime) -> datetime: if date_time is None: return None @@ -34,6 +35,7 @@ def _naive(date_time: datetime) -> datetime: date_time.astimezone(timezone.utc) return date_time.replace(tzinfo=None) + def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCaller]: """Get the tokens info""" caller_name = TOKEN_REPO.get_token_by_token_string(caller_token).device_name @@ -91,9 +93,6 @@ class RecoveryTokenStatus(BaseModel): uses_left: Optional[int] = None - - - def get_api_recovery_token_status() -> RecoveryTokenStatus: """Get the recovery token status""" token = TOKEN_REPO.get_recovery_key() diff --git a/selfprivacy_api/models/tokens/time.py 
b/selfprivacy_api/models/tokens/time.py index 35fd992..967fcfb 100644 --- a/selfprivacy_api/models/tokens/time.py +++ b/selfprivacy_api/models/tokens/time.py @@ -1,13 +1,14 @@ from datetime import datetime, timezone + def is_past(dt: datetime) -> bool: # we cannot compare a naive now() # to dt which might be tz-aware or unaware dt = ensure_timezone(dt) return dt < datetime.now(timezone.utc) -def ensure_timezone(dt:datetime) -> datetime: + +def ensure_timezone(dt: datetime) -> datetime: if dt.tzinfo is None or dt.tzinfo.utcoffset(None) is None: - dt = dt.replace(tzinfo= timezone.utc) - return dt - + dt = dt.replace(tzinfo=timezone.utc) + return dt From 5be3c83952a748b87c4be6a3f33c47ed0fe82057 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 21 Jun 2023 12:15:33 +0000 Subject: [PATCH 067/246] fix(tokens-repo): persistent hashing --- .../tokens/redis_tokens_repository.py | 26 ++++++++++++++----- 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 8e8dfe5..ccd63be 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -3,6 +3,7 @@ Token repository using Redis as backend. 
""" from typing import Optional from datetime import datetime, timezone +from hashlib import md5 from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, @@ -28,7 +29,10 @@ class RedisTokensRepository(AbstractTokensRepository): @staticmethod def token_key_for_device(device_name: str): - return TOKENS_PREFIX + str(hash(device_name)) + hash = md5() + hash.update(bytes(device_name, "utf-8")) + digest = hash.hexdigest() + return TOKENS_PREFIX + digest def get_tokens(self) -> list[Token]: """Get the tokens""" @@ -38,16 +42,23 @@ class RedisTokensRepository(AbstractTokensRepository): for key in token_keys: token = self._token_from_hash(key) if token is not None: - # token creation dates are temporarily not tz-aware - token.created_at = token.created_at.replace(tzinfo=None) tokens.append(token) return tokens + def _discover_token_key(self, input_token: Token) -> str: + """brute-force searching for tokens, for robust deletion""" + redis = self.connection + token_keys = redis.keys(TOKENS_PREFIX + "*") + for key in token_keys: + token = self._token_from_hash(key) + if token == input_token: + return key + def delete_token(self, input_token: Token) -> None: """Delete the token""" redis = self.connection - key = RedisTokensRepository._token_redis_key(input_token) - if input_token not in self.get_tokens(): + key = self._discover_token_key(input_token) + if key is None: raise TokenNotFound redis.delete(key) @@ -131,7 +142,10 @@ class RedisTokensRepository(AbstractTokensRepository): return None def _token_from_hash(self, redis_key: str) -> Optional[Token]: - return self._hash_as_model(redis_key, Token) + token = self._hash_as_model(redis_key, Token) + if token is not None: + token.created_at = token.created_at.replace(tzinfo=None) + return token def _recovery_key_from_hash(self, redis_key: str) -> Optional[RecoveryKey]: return self._hash_as_model(redis_key, RecoveryKey) From 027a37bb47040266edd09d2cab9469dad6ea3a6f Mon Sep 17 00:00:00 
2001 From: Houkime <> Date: Mon, 21 Aug 2023 11:11:56 +0000 Subject: [PATCH 068/246] feature(backup): remember the reason for making a snapshot --- selfprivacy_api/backup/__init__.py | 15 ++-- selfprivacy_api/backup/backuppers/__init__.py | 8 +- .../backup/backuppers/none_backupper.py | 5 +- .../backup/backuppers/restic_backupper.py | 81 ++++++++++--------- selfprivacy_api/backup/tasks.py | 10 ++- .../graphql/common_types/backup.py | 7 ++ selfprivacy_api/models/backup/snapshot.py | 3 + tests/test_graphql/test_backup.py | 7 +- 8 files changed, 85 insertions(+), 51 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index c28c01f..3b141fa 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -23,7 +23,7 @@ from selfprivacy_api.jobs import Jobs, JobStatus, Job from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy, BackupReason from selfprivacy_api.models.backup.snapshot import Snapshot @@ -264,10 +264,12 @@ class Backups: # Backup @staticmethod - def back_up(service: Service) -> Snapshot: + def back_up( + service: Service, reason: BackupReason = BackupReason.EXPLICIT + ) -> Snapshot: """The top-level function to back up a service""" folders = service.get_folders() - tag = service.get_id() + service_name = service.get_id() job = get_backup_job(service) if job is None: @@ -278,9 +280,10 @@ class Backups: service.pre_backup() snapshot = Backups.provider().backupper.start_backup( folders, - tag, + service_name, + reason=reason, ) - Backups._store_last_snapshot(tag, snapshot) + Backups._store_last_snapshot(service_name, snapshot) service.post_restore() except Exception as error: Jobs.update(job, status=JobStatus.ERROR) @@ -306,7 +309,7 @@ class Backups: snapshot: Snapshot, job: Job, ) -> None: - 
failsafe_snapshot = Backups.back_up(service) + failsafe_snapshot = Backups.back_up(service, BackupReason.PRE_RESTORE) Jobs.update(job, status=JobStatus.RUNNING) try: diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index ccf78b9..0067a41 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -2,6 +2,7 @@ from abc import ABC, abstractmethod from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.graphql.common_types.backup import BackupReason class AbstractBackupper(ABC): @@ -22,7 +23,12 @@ class AbstractBackupper(ABC): raise NotImplementedError @abstractmethod - def start_backup(self, folders: List[str], tag: str) -> Snapshot: + def start_backup( + self, + folders: List[str], + service_name: str, + reason: BackupReason = BackupReason.EXPLICIT, + ) -> Snapshot: """Start a backup of the given folders""" raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index 3f9f7fd..429d9ab 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -2,6 +2,7 @@ from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.backup.backuppers import AbstractBackupper +from selfprivacy_api.graphql.common_types.backup import BackupReason class NoneBackupper(AbstractBackupper): @@ -13,7 +14,9 @@ class NoneBackupper(AbstractBackupper): def set_creds(self, account: str, key: str, repo: str): pass - def start_backup(self, folders: List[str], tag: str): + def start_backup( + self, folders: List[str], tag: str, reason: BackupReason = BackupReason.EXPLICIT + ): raise NotImplementedError def get_snapshots(self) -> List[Snapshot]: diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py 
b/selfprivacy_api/backup/backuppers/restic_backupper.py index 3a5fc49..f5467ff 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -12,6 +12,7 @@ from os.path import exists, join from os import listdir from time import sleep +from selfprivacy_api.graphql.common_types.backup import BackupReason from selfprivacy_api.backup.util import output_yielder, sync from selfprivacy_api.backup.backuppers import AbstractBackupper from selfprivacy_api.models.backup.snapshot import Snapshot @@ -84,7 +85,7 @@ class ResticBackupper(AbstractBackupper): def _password_command(self): return f"echo {LocalBackupSecret.get()}" - def restic_command(self, *args, tag: str = "") -> List[str]: + def restic_command(self, *args, tags: List[str] = []) -> List[str]: command = [ "restic", "-o", @@ -94,13 +95,14 @@ class ResticBackupper(AbstractBackupper): "--password-command", self._password_command(), ] - if tag != "": - command.extend( - [ - "--tag", - tag, - ] - ) + if tags != []: + for tag in tags: + command.extend( + [ + "--tag", + tag, + ] + ) if args: command.extend(ResticBackupper.__flatten_list(args)) return command @@ -164,7 +166,12 @@ class ResticBackupper(AbstractBackupper): return result @unlocked_repo - def start_backup(self, folders: List[str], tag: str) -> Snapshot: + def start_backup( + self, + folders: List[str], + service_name: str, + reason: BackupReason = BackupReason.EXPLICIT, + ) -> Snapshot: """ Start backup with restic """ @@ -173,33 +180,35 @@ class ResticBackupper(AbstractBackupper): # of a string and an array of strings assert not isinstance(folders, str) + tags = [service_name, reason.value] + backup_command = self.restic_command( "backup", "--json", folders, - tag=tag, + tags=tags, ) - messages = [] - - service = get_service_by_id(tag) + service = get_service_by_id(service_name) if service is None: - raise ValueError("No service with id ", tag) - + raise ValueError("No service with id ", 
service_name) job = get_backup_job(service) + + messages = [] output = [] try: for raw_message in output_yielder(backup_command): output.append(raw_message) - message = self.parse_message( - raw_message, - job, - ) + message = self.parse_message(raw_message, job) messages.append(message) - return ResticBackupper._snapshot_from_backup_messages( - messages, - tag, + id = ResticBackupper._snapshot_id_from_backup_messages(messages) + return Snapshot( + created_at=datetime.datetime.now(datetime.timezone.utc), + id=id, + service_name=service_name, + reason=reason, ) + except ValueError as error: raise ValueError( "Could not create a snapshot: ", @@ -210,13 +219,13 @@ class ResticBackupper(AbstractBackupper): ) from error @staticmethod - def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot: + def _snapshot_id_from_backup_messages(messages) -> Snapshot: for message in messages: if message["message_type"] == "summary": - return ResticBackupper._snapshot_from_fresh_summary( - message, - repo_name, - ) + # There is a discrepancy between versions of restic/rclone + # Some report short_id in this field and some full + return message["snapshot_id"][0:SHORT_ID_LEN] + raise ValueError("no summary message in restic json output") def parse_message(self, raw_message_line: str, job=None) -> dict: @@ -232,16 +241,6 @@ class ResticBackupper(AbstractBackupper): ) return message - @staticmethod - def _snapshot_from_fresh_summary(message: dict, repo_name) -> Snapshot: - return Snapshot( - # There is a discrepancy between versions of restic/rclone - # Some report short_id in this field and some full - id=message["snapshot_id"][0:SHORT_ID_LEN], - created_at=datetime.datetime.now(datetime.timezone.utc), - service_name=repo_name, - ) - def init(self) -> None: init_command = self.restic_command( "init", @@ -475,11 +474,19 @@ class ResticBackupper(AbstractBackupper): def get_snapshots(self) -> List[Snapshot]: """Get all snapshots from the repo""" snapshots = [] + for 
restic_snapshot in self._load_snapshots(): + # Compatibility with previous snaps: + if len(restic_snapshot["tags"]) == 1: + reason = BackupReason.EXPLICIT + else: + reason = restic_snapshot["tags"][1] + snapshot = Snapshot( id=restic_snapshot["short_id"], created_at=restic_snapshot["time"], service_name=restic_snapshot["tags"][0], + reason=reason, ) snapshots.append(snapshot) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index db350d4..546b27c 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -3,7 +3,7 @@ The tasks module contains the worker tasks that are used to back up and restore """ from datetime import datetime, timezone -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy, BackupReason from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey @@ -22,11 +22,13 @@ def validate_datetime(dt: datetime) -> bool: # huey tasks need to return something @huey.task() -def start_backup(service: Service) -> bool: +def start_backup( + service: Service, reason: BackupReason = BackupReason.EXPLICIT +) -> bool: """ The worker task that starts the backup process. 
""" - Backups.back_up(service) + Backups.back_up(service, reason) return True @@ -49,4 +51,4 @@ def automatic_backup(): """ time = datetime.utcnow().replace(tzinfo=timezone.utc) for service in Backups.services_to_back_up(time): - start_backup(service) + start_backup(service, BackupReason.AUTO) diff --git a/selfprivacy_api/graphql/common_types/backup.py b/selfprivacy_api/graphql/common_types/backup.py index 992363b..9eaef12 100644 --- a/selfprivacy_api/graphql/common_types/backup.py +++ b/selfprivacy_api/graphql/common_types/backup.py @@ -8,3 +8,10 @@ from enum import Enum class RestoreStrategy(Enum): INPLACE = "INPLACE" DOWNLOAD_VERIFY_OVERWRITE = "DOWNLOAD_VERIFY_OVERWRITE" + + +@strawberry.enum +class BackupReason(Enum): + EXPLICIT = "EXPLICIT" + AUTO = "AUTO" + PRE_RESTORE = "PRE_RESTORE" diff --git a/selfprivacy_api/models/backup/snapshot.py b/selfprivacy_api/models/backup/snapshot.py index 9893f03..28ad661 100644 --- a/selfprivacy_api/models/backup/snapshot.py +++ b/selfprivacy_api/models/backup/snapshot.py @@ -1,8 +1,11 @@ import datetime from pydantic import BaseModel +from selfprivacy_api.graphql.common_types.backup import BackupReason + class Snapshot(BaseModel): id: str service_name: str created_at: datetime.datetime + reason: BackupReason diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 1990ef7..fdb8497 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -14,7 +14,7 @@ from selfprivacy_api.services import Service, get_all_services from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.graphql.queries.providers import BackupProvider -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy +from selfprivacy_api.graphql.common_types.backup import RestoreStrategy, BackupReason from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.models.backup.snapshot import Snapshot 
@@ -428,7 +428,10 @@ def test_forget_snapshot(backups, dummy_service): def test_forget_nonexistent_snapshot(backups, dummy_service): bogus = Snapshot( - id="gibberjibber", service_name="nohoho", created_at=datetime.now(timezone.utc) + id="gibberjibber", + service_name="nohoho", + created_at=datetime.now(timezone.utc), + reason=BackupReason.EXPLICIT, ) with pytest.raises(ValueError): Backups.forget_snapshot(bogus) From 1b9761293cea920a3e39ce606173412eba30758f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 21 Aug 2023 11:30:35 +0000 Subject: [PATCH 069/246] test(backup): test reasons --- tests/test_graphql/test_backup.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index fdb8497..16933b8 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -286,6 +286,16 @@ def test_backup_returns_snapshot(backups, dummy_service): assert Backups.get_snapshot_by_id(snapshot.id) is not None assert snapshot.service_name == name assert snapshot.created_at is not None + assert snapshot.reason == BackupReason.EXPLICIT + + +def test_backup_reasons(backups, dummy_service): + snap = Backups.back_up(dummy_service, BackupReason.AUTO) + assert snap.reason == BackupReason.AUTO + + Backups.force_snapshot_cache_reload() + snaps = Backups.get_snapshots(dummy_service) + assert snaps[0].reason == BackupReason.AUTO def folder_files(folder): @@ -495,6 +505,8 @@ def test_restore_snapshot_task( snaps = Backups.get_snapshots(dummy_service) if restore_strategy == RestoreStrategy.INPLACE: assert len(snaps) == 2 + reasons = [snap.reason for snap in snaps] + assert BackupReason.PRE_RESTORE in reasons else: assert len(snaps) == 1 From 30b62c351aba37c60edb81aa8f219c1dad2fd6a4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 21 Aug 2023 11:31:29 +0000 Subject: [PATCH 070/246] feature(redis): compatibility with str enums --- selfprivacy_api/utils/redis_model_storage.py | 3 +++ 1 
file changed, 3 insertions(+) diff --git a/selfprivacy_api/utils/redis_model_storage.py b/selfprivacy_api/utils/redis_model_storage.py index 51faff7..06dfe8c 100644 --- a/selfprivacy_api/utils/redis_model_storage.py +++ b/selfprivacy_api/utils/redis_model_storage.py @@ -1,11 +1,14 @@ from datetime import datetime from typing import Optional +from enum import Enum def store_model_as_hash(redis, redis_key, model): for key, value in model.dict().items(): if isinstance(value, datetime): value = value.isoformat() + if isinstance(value, Enum): + value = value.value redis.hset(redis_key, key, str(value)) From b2c7e8b73a3cf59ddd67206da6314e120f3de7ee Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 21 Aug 2023 12:45:31 +0000 Subject: [PATCH 071/246] feature(backups): caps for autobackups --- selfprivacy_api/backup/__init__.py | 37 ++++++++++++++++++++++++++++++ selfprivacy_api/backup/storage.py | 13 +++++++++++ tests/test_graphql/test_backup.py | 25 ++++++++++++++++++++ 3 files changed, 75 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 3b141fa..b16f089 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -283,7 +283,10 @@ class Backups: service_name, reason=reason, ) + Backups._store_last_snapshot(service_name, snapshot) + if reason == BackupReason.AUTO: + Backups._prune_auto_snaps(service) service.post_restore() except Exception as error: Jobs.update(job, status=JobStatus.ERROR) @@ -292,6 +295,40 @@ class Backups: Jobs.update(job, status=JobStatus.FINISHED) return snapshot + @staticmethod + def _auto_snaps(service): + return [ + snap + for snap in Backups.get_snapshots(service) + if snap.reason == BackupReason.AUTO + ] + + @staticmethod + def _prune_auto_snaps(service) -> None: + max = Backups.max_auto_snapshots() + if max == -1: + return + + auto_snaps = Backups._auto_snaps(service) + if len(auto_snaps) > max: + n_to_kill = len(auto_snaps) - max + sorted_snaps = 
sorted(auto_snaps, key=lambda s: s.created_at) + snaps_to_kill = sorted_snaps[:n_to_kill] + for snap in snaps_to_kill: + Backups.forget_snapshot(snap) + + @staticmethod + def set_max_auto_snapshots(value: int) -> None: + """everything <=0 means unlimited""" + if value <= 0: + value = -1 + Storage.set_max_auto_snapshots(value) + + @staticmethod + def max_auto_snapshots() -> int: + """-1 means unlimited""" + return Storage.max_auto_snapshots() + # Restoring @staticmethod diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index d46f584..1a0091f 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -26,6 +26,7 @@ REDIS_INITTED_CACHE = "backups:repo_initted" REDIS_PROVIDER_KEY = "backups:provider" REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" +REDIS_AUTOBACKUP_MAX_KEY = "backups:autobackup_cap" redis = RedisPool().get_connection() @@ -39,6 +40,7 @@ class Storage: redis.delete(REDIS_PROVIDER_KEY) redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) redis.delete(REDIS_INITTED_CACHE) + redis.delete(REDIS_AUTOBACKUP_MAX_KEY) prefixes_to_clean = [ REDIS_SNAPSHOTS_PREFIX, @@ -175,3 +177,14 @@ class Storage: def mark_as_uninitted(): """Marks the repository as initialized""" redis.delete(REDIS_INITTED_CACHE) + + @staticmethod + def set_max_auto_snapshots(value: int): + redis.set(REDIS_AUTOBACKUP_MAX_KEY, value) + + @staticmethod + def max_auto_snapshots(): + if redis.exists(REDIS_AUTOBACKUP_MAX_KEY): + return int(redis.get(REDIS_AUTOBACKUP_MAX_KEY)) + else: + return -1 diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 16933b8..781468a 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -298,6 +298,31 @@ def test_backup_reasons(backups, dummy_service): assert snaps[0].reason == BackupReason.AUTO +def test_too_many_auto(backups, dummy_service): + assert Backups.max_auto_snapshots() == -1 + Backups.set_max_auto_snapshots(2) + assert 
Backups.max_auto_snapshots() == 2 + + snap = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 1 + snap2 = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 2 + snap3 = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 2 + + snaps = Backups.get_snapshots(dummy_service) + + assert snap2 in snaps + assert snap3 in snaps + assert snap not in snaps + + Backups.set_max_auto_snapshots(-1) + snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 3 + assert snap4 in snaps + + def folder_files(folder): return [ path.join(folder, filename) From 9207f5385ca2f23f4ef240c80246499930d0afd9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 28 Aug 2023 17:02:45 +0000 Subject: [PATCH 072/246] feature(backups): actual finegrained quotas --- selfprivacy_api/backup/__init__.py | 97 +++++++- selfprivacy_api/backup/storage.py | 26 ++ .../graphql/common_types/backup.py | 14 ++ tests/test_graphql/test_backup.py | 232 ++++++++++++++++-- 4 files changed, 340 insertions(+), 29 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index b16f089..73f74a9 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -23,7 +23,18 @@ from selfprivacy_api.jobs import Jobs, JobStatus, Job from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy, BackupReason +from selfprivacy_api.graphql.common_types.backup import ( + RestoreStrategy, + BackupReason, + AutobackupQuotas, +) +from selfprivacy_api.backup.time import ( + same_day, + same_month, + same_week, + same_year, + same_lifetime_of_the_universe, +) from selfprivacy_api.models.backup.snapshot import Snapshot @@ -303,20 +314,88 @@ 
class Backups: if snap.reason == BackupReason.AUTO ] + @staticmethod + def add_snap_but_with_quotas( + new_snap: Snapshot, snaps: List[Snapshot], quotas: AutobackupQuotas + ) -> None: + quotas_map = { + same_day: quotas.daily, + same_week: quotas.weekly, + same_month: quotas.monthly, + same_year: quotas.yearly, + same_lifetime_of_the_universe: quotas.total, + } + + snaps.append(new_snap) + + for is_same_period, quota in quotas_map.items(): + if quota <= 0: + continue + + cohort = [ + snap + for snap in snaps + if is_same_period(snap.created_at, new_snap.created_at) + ] + sorted_cohort = sorted(cohort, key=lambda s: s.created_at) + n_to_kill = len(cohort) - quota + if n_to_kill > 0: + snaps_to_kill = sorted_cohort[:n_to_kill] + for snap in snaps_to_kill: + snaps.remove(snap) + + @staticmethod + def _prune_snaps_with_quotas(snapshots: List[Snapshot]) -> List[Snapshot]: + # Function broken out for testability + sorted_snaps = sorted(snapshots, key=lambda s: s.created_at) + quotas = Backups.autobackup_quotas() + + new_snaplist: List[Snapshot] = [] + for snap in sorted_snaps: + Backups.add_snap_but_with_quotas(snap, new_snaplist, quotas) + + return new_snaplist + @staticmethod def _prune_auto_snaps(service) -> None: - max = Backups.max_auto_snapshots() - if max == -1: - return + # Not very testable by itself, so most testing is going on Backups._prune_snaps_with_quotas + # We can still test total limits and, say, daily limits auto_snaps = Backups._auto_snaps(service) - if len(auto_snaps) > max: - n_to_kill = len(auto_snaps) - max - sorted_snaps = sorted(auto_snaps, key=lambda s: s.created_at) - snaps_to_kill = sorted_snaps[:n_to_kill] - for snap in snaps_to_kill: + new_snaplist = Backups._prune_snaps_with_quotas(auto_snaps) + + # TODO: Can be optimized since there is forgetting of an array in one restic op + # but most of the time this will be only one snap to forget. 
+ for snap in auto_snaps: + if snap not in new_snaplist: Backups.forget_snapshot(snap) + @staticmethod + def _standardize_quotas(i: int) -> int: + if i <= 0: + i = -1 + return i + + @staticmethod + def autobackup_quotas() -> AutobackupQuotas: + """everything <=0 means unlimited""" + + return Storage.autobackup_quotas() + + @staticmethod + def set_autobackup_quotas(quotas: AutobackupQuotas) -> None: + """everything <=0 means unlimited""" + + Storage.set_autobackup_quotas( + AutobackupQuotas( + daily=Backups._standardize_quotas(quotas.daily), + weekly=Backups._standardize_quotas(quotas.weekly), + monthly=Backups._standardize_quotas(quotas.monthly), + yearly=Backups._standardize_quotas(quotas.yearly), + total=Backups._standardize_quotas(quotas.total), + ) + ) + @staticmethod def set_max_auto_snapshots(value: int) -> None: """everything <=0 means unlimited""" diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 1a0091f..38fc3a2 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -6,6 +6,10 @@ from datetime import datetime from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.models.backup.provider import BackupProviderModel +from selfprivacy_api.graphql.common_types.backup import ( + AutobackupQuotas, + _AutobackupQuotas, +) from selfprivacy_api.utils.redis_pool import RedisPool from selfprivacy_api.utils.redis_model_storage import ( @@ -27,6 +31,7 @@ REDIS_PROVIDER_KEY = "backups:provider" REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" REDIS_AUTOBACKUP_MAX_KEY = "backups:autobackup_cap" +REDIS_AUTOBACKUP_QUOTAS_KEY = "backups:autobackup_quotas_key" redis = RedisPool().get_connection() @@ -41,6 +46,7 @@ class Storage: redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) redis.delete(REDIS_INITTED_CACHE) redis.delete(REDIS_AUTOBACKUP_MAX_KEY) + redis.delete(REDIS_AUTOBACKUP_QUOTAS_KEY) prefixes_to_clean = [ REDIS_SNAPSHOTS_PREFIX, @@ -178,6 +184,26 @@ class Storage: 
"""Marks the repository as initialized""" redis.delete(REDIS_INITTED_CACHE) + @staticmethod + def set_autobackup_quotas(quotas: AutobackupQuotas) -> None: + store_model_as_hash(redis, REDIS_AUTOBACKUP_QUOTAS_KEY, quotas.to_pydantic()) + + @staticmethod + def autobackup_quotas() -> AutobackupQuotas: + quotas_model = hash_as_model( + redis, REDIS_AUTOBACKUP_QUOTAS_KEY, _AutobackupQuotas + ) + if quotas_model is None: + unlimited_quotas = AutobackupQuotas( + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, + total=-1, + ) + return unlimited_quotas + return AutobackupQuotas.from_pydantic(quotas_model) + @staticmethod def set_max_auto_snapshots(value: int): redis.set(REDIS_AUTOBACKUP_MAX_KEY, value) diff --git a/selfprivacy_api/graphql/common_types/backup.py b/selfprivacy_api/graphql/common_types/backup.py index 9eaef12..3d5b5aa 100644 --- a/selfprivacy_api/graphql/common_types/backup.py +++ b/selfprivacy_api/graphql/common_types/backup.py @@ -2,6 +2,7 @@ # pylint: disable=too-few-public-methods import strawberry from enum import Enum +from pydantic import BaseModel @strawberry.enum @@ -15,3 +16,16 @@ class BackupReason(Enum): EXPLICIT = "EXPLICIT" AUTO = "AUTO" PRE_RESTORE = "PRE_RESTORE" + + +class _AutobackupQuotas(BaseModel): + daily: int + weekly: int + monthly: int + yearly: int + total: int + + +@strawberry.experimental.pydantic.type(model=_AutobackupQuotas, all_fields=True) +class AutobackupQuotas: + pass diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 781468a..3314597 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -5,8 +5,12 @@ from os import makedirs from os import remove from os import listdir from os import urandom -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta, timezone, date, time from subprocess import Popen +from copy import copy + +import secrets + import selfprivacy_api.services as services from selfprivacy_api.services 
import Service, get_all_services @@ -19,6 +23,8 @@ from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas + from selfprivacy_api.backup import Backups, BACKUP_PROVIDER_ENVS import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider @@ -298,29 +304,215 @@ def test_backup_reasons(backups, dummy_service): assert snaps[0].reason == BackupReason.AUTO -def test_too_many_auto(backups, dummy_service): - assert Backups.max_auto_snapshots() == -1 - Backups.set_max_auto_snapshots(2) - assert Backups.max_auto_snapshots() == 2 +unlimited_quotas = AutobackupQuotas( + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, + total=-1, +) - snap = Backups.back_up(dummy_service, BackupReason.AUTO) - assert len(Backups.get_snapshots(dummy_service)) == 1 - snap2 = Backups.back_up(dummy_service, BackupReason.AUTO) - assert len(Backups.get_snapshots(dummy_service)) == 2 - snap3 = Backups.back_up(dummy_service, BackupReason.AUTO) - assert len(Backups.get_snapshots(dummy_service)) == 2 - snaps = Backups.get_snapshots(dummy_service) +def test_get_empty_quotas(backups): + quotas = Backups.autobackup_quotas() + assert quotas is not None + assert quotas == unlimited_quotas - assert snap2 in snaps - assert snap3 in snaps - assert snap not in snaps - Backups.set_max_auto_snapshots(-1) - snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) - snaps = Backups.get_snapshots(dummy_service) - assert len(snaps) == 3 - assert snap4 in snaps +def test_set_quotas(backups): + quotas = AutobackupQuotas( + daily=2343, + weekly=343, + monthly=0, + yearly=-34556, + total=563, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == AutobackupQuotas( + daily=2343, + weekly=343, + monthly=-1, + yearly=-1, + total=563, + ) + + +def dummy_snapshot(date: datetime): + return Snapshot( + 
id=str(hash(date)), + service_name="someservice", + created_at=date, + reason=BackupReason.EXPLICIT, + ) + + +def test_autobackup_snapshots_pruning(backups): + # Wednesday, fourth week + now = datetime(year=2023, month=1, day=25, hour=10) + + snaps = [ + dummy_snapshot(now - timedelta(days=365 * 2)), + dummy_snapshot(now - timedelta(days=20)), + dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=1, hours=3)), + dummy_snapshot(now - timedelta(days=1, hours=2)), + dummy_snapshot(now - timedelta(days=1)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now), + ] + old_len = len(snaps) + + quotas = copy(unlimited_quotas) + Backups.set_autobackup_quotas(quotas) + assert Backups._prune_snaps_with_quotas(snaps) == snaps + + quotas = copy(unlimited_quotas) + quotas.daily = 2 + Backups.set_autobackup_quotas(quotas) + + pruned_snaps = Backups._prune_snaps_with_quotas(snaps) + assert pruned_snaps == [ + dummy_snapshot(now - timedelta(days=365 * 2)), + dummy_snapshot(now - timedelta(days=20)), + dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=1, hours=2)), + dummy_snapshot(now - timedelta(days=1)), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now), + ] + + # checking that this function does not mutate the argument + assert snaps != pruned_snaps + assert len(snaps) == old_len + + quotas = copy(unlimited_quotas) + quotas.weekly = 4 + Backups.set_autobackup_quotas(quotas) + + pruned_snaps = Backups._prune_snaps_with_quotas(snaps) + assert pruned_snaps == [ + dummy_snapshot(now - timedelta(days=365 * 2)), + dummy_snapshot(now - timedelta(days=20)), + dummy_snapshot(now - timedelta(days=1)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now), + ] + + quotas = copy(unlimited_quotas) + quotas.monthly = 7 + Backups.set_autobackup_quotas(quotas) + + pruned_snaps = 
Backups._prune_snaps_with_quotas(snaps) + assert pruned_snaps == [ + dummy_snapshot(now - timedelta(days=365 * 2)), + dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=1, hours=3)), + dummy_snapshot(now - timedelta(days=1, hours=2)), + dummy_snapshot(now - timedelta(days=1)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now), + ] + + +def test_autobackup_snapshots_pruning_yearly(backups): + snaps = [ + dummy_snapshot(datetime(year=2023, month=2, day=1)), + dummy_snapshot(datetime(year=2023, month=3, day=1)), + dummy_snapshot(datetime(year=2023, month=4, day=1)), + dummy_snapshot(datetime(year=2055, month=3, day=1)), + ] + quotas = copy(unlimited_quotas) + quotas.yearly = 2 + Backups.set_autobackup_quotas(quotas) + + pruned_snaps = Backups._prune_snaps_with_quotas(snaps) + assert pruned_snaps == [ + dummy_snapshot(datetime(year=2023, month=3, day=1)), + dummy_snapshot(datetime(year=2023, month=4, day=1)), + dummy_snapshot(datetime(year=2055, month=3, day=1)), + ] + + +def test_autobackup_snapshots_pruning_bottleneck(backups): + now = datetime(year=2023, month=1, day=25, hour=10) + snaps = [ + dummy_snapshot(now - timedelta(hours=4)), + dummy_snapshot(now - timedelta(hours=3)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now), + ] + + yearly_quota = copy(unlimited_quotas) + yearly_quota.yearly = 2 + + monthly_quota = copy(unlimited_quotas) + monthly_quota.monthly = 2 + + weekly_quota = copy(unlimited_quotas) + weekly_quota.weekly = 2 + + daily_quota = copy(unlimited_quotas) + daily_quota.daily = 2 + + total_quota = copy(unlimited_quotas) + total_quota.total = 2 + + for quota in [total_quota, yearly_quota, monthly_quota, weekly_quota, daily_quota]: + Backups.set_autobackup_quotas(quota) + pruned_snaps = Backups._prune_snaps_with_quotas(snaps) + assert pruned_snaps == [ + dummy_snapshot(now - timedelta(minutes=5)), + 
dummy_snapshot(now), + ] + + +def test_autobackup_snapshots_pruning_edgeweek(backups): + # jan 1 2023 is Sunday + snaps = [ + dummy_snapshot(datetime(year=2022, month=12, day=30)), + dummy_snapshot(datetime(year=2022, month=12, day=31)), + dummy_snapshot(datetime(year=2023, month=1, day=1)), + dummy_snapshot(datetime(year=2023, month=1, day=6)), + ] + quotas = copy(unlimited_quotas) + quotas.weekly = 2 + Backups.set_autobackup_quotas(quotas) + + pruned_snaps = Backups._prune_snaps_with_quotas(snaps) + assert pruned_snaps == [ + dummy_snapshot(datetime(year=2022, month=12, day=31)), + dummy_snapshot(datetime(year=2023, month=1, day=1)), + dummy_snapshot(datetime(year=2023, month=1, day=6)), + ] + + +# def test_too_many_auto(backups, dummy_service): +# assert Backups.autobackup_quotas() +# Backups.set_max_auto_snapshots(2) +# assert Backups.max_auto_snapshots() == 2 + +# snap = Backups.back_up(dummy_service, BackupReason.AUTO) +# assert len(Backups.get_snapshots(dummy_service)) == 1 +# snap2 = Backups.back_up(dummy_service, BackupReason.AUTO) +# assert len(Backups.get_snapshots(dummy_service)) == 2 +# snap3 = Backups.back_up(dummy_service, BackupReason.AUTO) +# assert len(Backups.get_snapshots(dummy_service)) == 2 + +# snaps = Backups.get_snapshots(dummy_service) + +# assert snap2 in snaps +# assert snap3 in snaps +# assert snap not in snaps + +# Backups.set_max_auto_snapshots(-1) +# snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) +# snaps = Backups.get_snapshots(dummy_service) +# assert len(snaps) == 3 +# assert snap4 in snaps def folder_files(folder): From a75a102df6cb2846bde8ad9e013dab8f06648c28 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 28 Aug 2023 17:15:27 +0000 Subject: [PATCH 073/246] test(backups): test quotas with actual backups --- tests/test_graphql/test_backup.py | 58 +++++++++++++++++++++---------- 1 file changed, 39 insertions(+), 19 deletions(-) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 
3314597..550c56b 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -490,29 +490,49 @@ def test_autobackup_snapshots_pruning_edgeweek(backups): ] -# def test_too_many_auto(backups, dummy_service): -# assert Backups.autobackup_quotas() -# Backups.set_max_auto_snapshots(2) -# assert Backups.max_auto_snapshots() == 2 +def test_too_many_auto(backups, dummy_service): + assert Backups.autobackup_quotas() + quota = copy(unlimited_quotas) + quota.total = 2 + Backups.set_autobackup_quotas(quota) + assert Backups.autobackup_quotas().total == 2 -# snap = Backups.back_up(dummy_service, BackupReason.AUTO) -# assert len(Backups.get_snapshots(dummy_service)) == 1 -# snap2 = Backups.back_up(dummy_service, BackupReason.AUTO) -# assert len(Backups.get_snapshots(dummy_service)) == 2 -# snap3 = Backups.back_up(dummy_service, BackupReason.AUTO) -# assert len(Backups.get_snapshots(dummy_service)) == 2 + snap = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 1 + snap2 = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 2 + snap3 = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 2 -# snaps = Backups.get_snapshots(dummy_service) + snaps = Backups.get_snapshots(dummy_service) + assert snap2 in snaps + assert snap3 in snaps + assert snap not in snaps -# assert snap2 in snaps -# assert snap3 in snaps -# assert snap not in snaps + quota.total = -1 + Backups.set_autobackup_quotas(quota) + snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) -# Backups.set_max_auto_snapshots(-1) -# snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) -# snaps = Backups.get_snapshots(dummy_service) -# assert len(snaps) == 3 -# assert snap4 in snaps + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 3 + assert snap4 in snaps + + # Retroactivity + quota.total = 1 + 
Backups.set_autobackup_quotas(quota) + snap5 = Backups.back_up(dummy_service, BackupReason.AUTO) + + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + assert snap5 in snaps + + # Explicit snaps are not affected + snap6 = Backups.back_up(dummy_service, BackupReason.EXPLICIT) + + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 2 + assert snap5 in snaps + assert snap6 in snaps def folder_files(folder): From 1fc47b049daa262637c244e785de4e792fa2dd06 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 28 Aug 2023 17:23:21 +0000 Subject: [PATCH 074/246] refactor(backups): clean up caps code --- selfprivacy_api/backup/__init__.py | 12 ------------ selfprivacy_api/backup/storage.py | 13 ------------- 2 files changed, 25 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 73f74a9..336b705 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -396,18 +396,6 @@ class Backups: ) ) - @staticmethod - def set_max_auto_snapshots(value: int) -> None: - """everything <=0 means unlimited""" - if value <= 0: - value = -1 - Storage.set_max_auto_snapshots(value) - - @staticmethod - def max_auto_snapshots() -> int: - """-1 means unlimited""" - return Storage.max_auto_snapshots() - # Restoring @staticmethod diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 38fc3a2..86b92f3 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -30,7 +30,6 @@ REDIS_INITTED_CACHE = "backups:repo_initted" REDIS_PROVIDER_KEY = "backups:provider" REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" -REDIS_AUTOBACKUP_MAX_KEY = "backups:autobackup_cap" REDIS_AUTOBACKUP_QUOTAS_KEY = "backups:autobackup_quotas_key" redis = RedisPool().get_connection() @@ -45,7 +44,6 @@ class Storage: redis.delete(REDIS_PROVIDER_KEY) redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) redis.delete(REDIS_INITTED_CACHE) - 
redis.delete(REDIS_AUTOBACKUP_MAX_KEY) redis.delete(REDIS_AUTOBACKUP_QUOTAS_KEY) prefixes_to_clean = [ @@ -203,14 +201,3 @@ class Storage: ) return unlimited_quotas return AutobackupQuotas.from_pydantic(quotas_model) - - @staticmethod - def set_max_auto_snapshots(value: int): - redis.set(REDIS_AUTOBACKUP_MAX_KEY, value) - - @staticmethod - def max_auto_snapshots(): - if redis.exists(REDIS_AUTOBACKUP_MAX_KEY): - return int(redis.get(REDIS_AUTOBACKUP_MAX_KEY)) - else: - return -1 From 0c04975ea4013d39ba60f98eee8894e2cc1f2bbb Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 28 Aug 2023 17:24:20 +0000 Subject: [PATCH 075/246] flx(backups): commit forgotten time.py --- selfprivacy_api/backup/time.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 selfprivacy_api/backup/time.py diff --git a/selfprivacy_api/backup/time.py b/selfprivacy_api/backup/time.py new file mode 100644 index 0000000..aba12bd --- /dev/null +++ b/selfprivacy_api/backup/time.py @@ -0,0 +1,28 @@ +from datetime import datetime, timedelta, time + + +def same_day(a: datetime, b: datetime) -> bool: + return a.date() == b.date() + + +def same_week(a: datetime, b: datetime) -> bool: + # doing the hard way because weeks traverse the edges of years + zerobased_weekday = a.isoweekday() - 1 + start_of_day = datetime.combine(a.date(), time.min) + start_of_week = start_of_day - timedelta(days=zerobased_weekday) + end_of_week = start_of_week + timedelta(days=7) + + if b >= start_of_week and b <= end_of_week: + return True + return False + + +def same_month(a: datetime, b: datetime) -> bool: + return a.month == b.month and a.year == b.year + + +def same_year(a: datetime, b: datetime) -> bool: + return a.year == b.year + +def same_lifetime_of_the_universe(a: datetime, b: datetime) -> bool: + return True From 9fdc536f9fa5fce702b1fea0133ce0f73c497dcf Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 28 Aug 2023 18:24:29 +0000 Subject: [PATCH 076/246] BROKEN(backups): hooking 
up quotas to API fails. AutobackupQuotas needs to be an input type, but if input type, it fails because it needs to be an Output type, which is not documented --- .../graphql/mutations/backup_mutations.py | 39 ++++++++++++++- selfprivacy_api/graphql/queries/backup.py | 4 ++ tests/test_graphql/test_api_backup.py | 50 +++++++++++++++++++ 3 files changed, 92 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index c022d57..babbcf8 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -11,7 +11,10 @@ from selfprivacy_api.graphql.queries.backup import BackupConfiguration from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.graphql.common_types.jobs import job_to_api_job -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy +from selfprivacy_api.graphql.common_types.backup import ( + RestoreStrategy, + AutobackupQuotas, +) from selfprivacy_api.backup import Backups from selfprivacy_api.services import get_service_by_id @@ -33,6 +36,13 @@ class InitializeRepositoryInput: password: str +@strawberry.input +class SetAutobackupQuotasInput: + """A single field input to reuse AutobackupQuotas""" + + quotas: AutobackupQuotas + + @strawberry.type class GenericBackupConfigReturn(MutationReturnInterface): """Generic backup config return""" @@ -90,6 +100,33 @@ class BackupMutations: configuration=Backup().configuration(), ) + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def set_autobackup_quotas( + self, quotas: SetAutobackupQuotasInput + ) -> GenericBackupConfigReturn: + """ + Set autobackup quotas. + Values <=0 for any timeframe mean no limits for that timeframe. + To disable autobackup use autobackup period setting, not this mutation. 
+ """ + + try: + Backups.set_autobackup_quotas(quotas) + return GenericBackupConfigReturn( + success=True, + message="", + code=200, + configuration=Backup().configuration(), + ) + + except Exception as e: + return GenericBackupConfigReturn( + success=False, + message=str(e), + code=400, + configuration=Backup().configuration(), + ) + @strawberry.mutation(permission_classes=[IsAuthenticated]) def start_backup(self, service_id: str) -> GenericJobMutationReturn: """Start backup""" diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 6535a88..e03215d 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -13,6 +13,7 @@ from selfprivacy_api.graphql.common_types.service import ( SnapshotInfo, service_to_graphql_service, ) +from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas from selfprivacy_api.services import get_service_by_id @@ -26,6 +27,8 @@ class BackupConfiguration: is_initialized: bool # If none, autobackups are disabled autobackup_period: typing.Optional[int] + # None is equal to all quotas being unlimited (-1). Optional for compatibility reasons. 
+ autobackup_quotas: typing.Optional[AutobackupQuotas] # Bucket name for Backblaze, path for some other providers location_name: typing.Optional[str] location_id: typing.Optional[str] @@ -42,6 +45,7 @@ class Backup: autobackup_period=Backups.autobackup_period_minutes(), location_name=Backups.provider().location, location_id=Backups.provider().repo_id, + autobackup_quotas=Backups.autobackup_quotas(), ) @strawberry.field diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index e53ce2a..9681e7b 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -4,6 +4,7 @@ from tests.common import generate_backup_query from selfprivacy_api.graphql.common_types.service import service_to_graphql_service +from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas from selfprivacy_api.jobs import Jobs, JobStatus API_RELOAD_SNAPSHOTS = """ @@ -38,6 +39,28 @@ mutation TestAutobackupPeriod($period: Int) { } """ + +API_SET_AUTOBACKUP_QUOTAS_MUTATION = """ +mutation TestAutobackupQuotas($input: SetAutobackupQuotasInput!) 
{ + backup { + setAutobackupQuotas(quotas: $input) { + success + message + code + configuration { + provider + encryptionKey + isInitialized + autobackupPeriod + locationName + locationId + autobackupQuotas + } + } + } +} +""" + API_REMOVE_REPOSITORY_MUTATION = """ mutation TestRemoveRepo { backup { @@ -177,6 +200,17 @@ def api_set_period(authorized_client, period): return response +def api_set_quotas(authorized_client, quotas): + response = authorized_client.post( + "/graphql", + json={ + "query": API_SET_AUTOBACKUP_QUOTAS_MUTATION, + "variables": {"input": {"quotas": quotas}}, + }, + ) + return response + + def api_remove(authorized_client): response = authorized_client.post( "/graphql", @@ -323,6 +357,22 @@ def test_remove(authorized_client, generic_userdata): assert configuration["isInitialized"] is False +def test_autobackup_quotas_nonzero(authorized_client): + quotas = AutobackupQuotas( + daily=2, + weekly=4, + monthly=13, + yearly=14, + total=3, + ) + response = api_set_quotas(authorized_client, quotas) + data = get_data(response)["backup"]["setAutobackupQuotas"] + assert_ok(data) + + configuration = data["configuration"] + assert configuration["autobackupQuotas"] == quotas + + def test_autobackup_period_nonzero(authorized_client): new_period = 11 response = api_set_period(authorized_client, new_period) From ad9384c850d0249db4f30bb58f472d8b3013a2ba Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 30 Aug 2023 12:03:19 +0300 Subject: [PATCH 077/246] fix(graphql): backup quotas field typing --- selfprivacy_api/backup/time.py | 1 + .../graphql/common_types/backup.py | 5 +++++ .../graphql/mutations/backup_mutations.py | 11 ++-------- selfprivacy_api/graphql/queries/backup.py | 2 +- tests/test_graphql/test_api_backup.py | 21 +++++++++++++------ 5 files changed, 24 insertions(+), 16 deletions(-) diff --git a/selfprivacy_api/backup/time.py b/selfprivacy_api/backup/time.py index aba12bd..9e34211 100644 --- a/selfprivacy_api/backup/time.py +++ 
b/selfprivacy_api/backup/time.py @@ -24,5 +24,6 @@ def same_month(a: datetime, b: datetime) -> bool: def same_year(a: datetime, b: datetime) -> bool: return a.year == b.year + def same_lifetime_of_the_universe(a: datetime, b: datetime) -> bool: return True diff --git a/selfprivacy_api/graphql/common_types/backup.py b/selfprivacy_api/graphql/common_types/backup.py index 3d5b5aa..cc03936 100644 --- a/selfprivacy_api/graphql/common_types/backup.py +++ b/selfprivacy_api/graphql/common_types/backup.py @@ -29,3 +29,8 @@ class _AutobackupQuotas(BaseModel): @strawberry.experimental.pydantic.type(model=_AutobackupQuotas, all_fields=True) class AutobackupQuotas: pass + + +@strawberry.experimental.pydantic.input(model=_AutobackupQuotas, all_fields=True) +class AutobackupQuotasInput: + pass diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index babbcf8..dcfebff 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -12,8 +12,8 @@ from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.graphql.common_types.jobs import job_to_api_job from selfprivacy_api.graphql.common_types.backup import ( + AutobackupQuotasInput, RestoreStrategy, - AutobackupQuotas, ) from selfprivacy_api.backup import Backups @@ -36,13 +36,6 @@ class InitializeRepositoryInput: password: str -@strawberry.input -class SetAutobackupQuotasInput: - """A single field input to reuse AutobackupQuotas""" - - quotas: AutobackupQuotas - - @strawberry.type class GenericBackupConfigReturn(MutationReturnInterface): """Generic backup config return""" @@ -102,7 +95,7 @@ class BackupMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def set_autobackup_quotas( - self, quotas: SetAutobackupQuotasInput + self, quotas: AutobackupQuotasInput ) -> GenericBackupConfigReturn: 
""" Set autobackup quotas. diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index e03215d..6d47a8c 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -28,7 +28,7 @@ class BackupConfiguration: # If none, autobackups are disabled autobackup_period: typing.Optional[int] # None is equal to all quotas being unlimited (-1). Optional for compatibility reasons. - autobackup_quotas: typing.Optional[AutobackupQuotas] + autobackup_quotas: AutobackupQuotas # Bucket name for Backblaze, path for some other providers location_name: typing.Optional[str] location_id: typing.Optional[str] diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 9681e7b..e8de4a1 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -4,7 +4,10 @@ from tests.common import generate_backup_query from selfprivacy_api.graphql.common_types.service import service_to_graphql_service -from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas +from selfprivacy_api.graphql.common_types.backup import ( + _AutobackupQuotas, + AutobackupQuotas, +) from selfprivacy_api.jobs import Jobs, JobStatus API_RELOAD_SNAPSHOTS = """ @@ -41,7 +44,7 @@ mutation TestAutobackupPeriod($period: Int) { API_SET_AUTOBACKUP_QUOTAS_MUTATION = """ -mutation TestAutobackupQuotas($input: SetAutobackupQuotasInput!) { +mutation TestAutobackupQuotas($input: AutobackupQuotasInput!) { backup { setAutobackupQuotas(quotas: $input) { success @@ -54,7 +57,13 @@ mutation TestAutobackupQuotas($input: SetAutobackupQuotasInput!) 
{ autobackupPeriod locationName locationId - autobackupQuotas + autobackupQuotas { + daily + weekly + monthly + yearly + total + } } } } @@ -200,12 +209,12 @@ def api_set_period(authorized_client, period): return response -def api_set_quotas(authorized_client, quotas): +def api_set_quotas(authorized_client, quotas: _AutobackupQuotas): response = authorized_client.post( "/graphql", json={ "query": API_SET_AUTOBACKUP_QUOTAS_MUTATION, - "variables": {"input": {"quotas": quotas}}, + "variables": {"input": quotas.dict()}, }, ) return response @@ -358,7 +367,7 @@ def test_remove(authorized_client, generic_userdata): def test_autobackup_quotas_nonzero(authorized_client): - quotas = AutobackupQuotas( + quotas = _AutobackupQuotas( daily=2, weekly=4, monthly=13, From 56be3d9c31b972c1bb6eb03e8fb2a624b84852d9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 8 Sep 2023 16:22:53 +0000 Subject: [PATCH 078/246] fix(backup): trim auto-snapshots on setting the quotas --- selfprivacy_api/backup/__init__.py | 3 +++ tests/test_graphql/test_backup.py | 4 +++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 336b705..7056071 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -396,6 +396,9 @@ class Backups: ) ) + for service in get_all_services(): + Backups._prune_auto_snaps(service) + # Restoring @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 550c56b..5daae0c 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -520,8 +520,10 @@ def test_too_many_auto(backups, dummy_service): # Retroactivity quota.total = 1 Backups.set_autobackup_quotas(quota) - snap5 = Backups.back_up(dummy_service, BackupReason.AUTO) + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + snap5 = Backups.back_up(dummy_service, BackupReason.AUTO) snaps = 
Backups.get_snapshots(dummy_service) assert len(snaps) == 1 assert snap5 in snaps From dedd6a9cc949fcd176ce941b5182611e4bc7d33c Mon Sep 17 00:00:00 2001 From: Inex Code Date: Sat, 9 Sep 2023 03:26:41 +0300 Subject: [PATCH 079/246] refactor(backups): use restic-like rotation policy --- selfprivacy_api/backup/__init__.py | 113 +++++--- .../backup/backuppers/restic_backupper.py | 21 +- selfprivacy_api/backup/storage.py | 4 +- selfprivacy_api/backup/time.py | 29 -- .../graphql/common_types/backup.py | 4 +- tests/test_graphql/test_api_backup.py | 4 +- tests/test_graphql/test_backup.py | 266 +++++++++++++----- 7 files changed, 278 insertions(+), 163 deletions(-) delete mode 100644 selfprivacy_api/backup/time.py diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7056071..dff4b3b 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -4,7 +4,7 @@ This module contains the controller class for backups. from datetime import datetime, timedelta import os from os import statvfs -from typing import List, Optional +from typing import Callable, List, Optional from selfprivacy_api.utils import ReadUserData, WriteUserData @@ -28,13 +28,7 @@ from selfprivacy_api.graphql.common_types.backup import ( BackupReason, AutobackupQuotas, ) -from selfprivacy_api.backup.time import ( - same_day, - same_month, - same_week, - same_year, - same_lifetime_of_the_universe, -) + from selfprivacy_api.models.backup.snapshot import Snapshot @@ -81,6 +75,24 @@ class NotDeadError(AssertionError): """ +class RotationBucket: + """ + Bucket object used for rotation. 
+ Has the following mutable fields: + - the counter, int + - the lambda function which takes datetime and the int and returns the int + - the last, int + """ + + def __init__(self, counter: int, last: int, rotation_lambda): + self.counter: int = counter + self.last: int = last + self.rotation_lambda: Callable[[datetime, int], int] = rotation_lambda + + def __str__(self) -> str: + return f"Bucket(counter={self.counter}, last={self.last})" + + class Backups: """A stateless controller class for backups""" @@ -314,45 +326,54 @@ class Backups: if snap.reason == BackupReason.AUTO ] - @staticmethod - def add_snap_but_with_quotas( - new_snap: Snapshot, snaps: List[Snapshot], quotas: AutobackupQuotas - ) -> None: - quotas_map = { - same_day: quotas.daily, - same_week: quotas.weekly, - same_month: quotas.monthly, - same_year: quotas.yearly, - same_lifetime_of_the_universe: quotas.total, - } - - snaps.append(new_snap) - - for is_same_period, quota in quotas_map.items(): - if quota <= 0: - continue - - cohort = [ - snap - for snap in snaps - if is_same_period(snap.created_at, new_snap.created_at) - ] - sorted_cohort = sorted(cohort, key=lambda s: s.created_at) - n_to_kill = len(cohort) - quota - if n_to_kill > 0: - snaps_to_kill = sorted_cohort[:n_to_kill] - for snap in snaps_to_kill: - snaps.remove(snap) - @staticmethod def _prune_snaps_with_quotas(snapshots: List[Snapshot]) -> List[Snapshot]: # Function broken out for testability - sorted_snaps = sorted(snapshots, key=lambda s: s.created_at) - quotas = Backups.autobackup_quotas() + # Sorting newest first + sorted_snaps = sorted(snapshots, key=lambda s: s.created_at, reverse=True) + quotas: AutobackupQuotas = Backups.autobackup_quotas() + + buckets: list[RotationBucket] = [ + RotationBucket( + quotas.last, + -1, + lambda _, index: index, + ), + RotationBucket( + quotas.daily, + -1, + lambda date, _: date.year * 10000 + date.month * 100 + date.day, + ), + RotationBucket( + quotas.weekly, + -1, + lambda date, _: date.year * 100 
+ date.isocalendar()[1], + ), + RotationBucket( + quotas.monthly, + -1, + lambda date, _: date.year * 100 + date.month, + ), + RotationBucket( + quotas.yearly, + -1, + lambda date, _: date.year, + ), + ] new_snaplist: List[Snapshot] = [] - for snap in sorted_snaps: - Backups.add_snap_but_with_quotas(snap, new_snaplist, quotas) + for i, snap in enumerate(sorted_snaps): + keep_snap = False + for bucket in buckets: + if (bucket.counter > 0) or (bucket.counter == -1): + val = bucket.rotation_lambda(snap.created_at, i) + if (val != bucket.last) or (i == len(sorted_snaps) - 1): + bucket.last = val + if bucket.counter > 0: + bucket.counter -= 1 + if not keep_snap: + new_snaplist.append(snap) + keep_snap = True return new_snaplist @@ -372,27 +393,27 @@ class Backups: @staticmethod def _standardize_quotas(i: int) -> int: - if i <= 0: + if i <= -1: i = -1 return i @staticmethod def autobackup_quotas() -> AutobackupQuotas: - """everything <=0 means unlimited""" + """0 means do not keep, -1 means unlimited""" return Storage.autobackup_quotas() @staticmethod def set_autobackup_quotas(quotas: AutobackupQuotas) -> None: - """everything <=0 means unlimited""" + """0 means do not keep, -1 means unlimited""" Storage.set_autobackup_quotas( AutobackupQuotas( + last=Backups._standardize_quotas(quotas.last), daily=Backups._standardize_quotas(quotas.daily), weekly=Backups._standardize_quotas(quotas.weekly), monthly=Backups._standardize_quotas(quotas.monthly), yearly=Backups._standardize_quotas(quotas.yearly), - total=Backups._standardize_quotas(quotas.total), ) ) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index f5467ff..b6c643b 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -5,7 +5,7 @@ import json import datetime import tempfile -from typing import List, TypeVar, Callable +from typing import List, Optional, TypeVar, Callable 
from collections.abc import Iterable from json.decoder import JSONDecodeError from os.path import exists, join @@ -33,12 +33,12 @@ def unlocked_repo(func: T) -> T: def inner(self: ResticBackupper, *args, **kwargs): try: return func(self, *args, **kwargs) - except Exception as e: - if "unable to create lock" in str(e): + except Exception as error: + if "unable to create lock" in str(error): self.unlock() return func(self, *args, **kwargs) else: - raise e + raise error # Above, we manually guarantee that the type returned is compatible. return inner # type: ignore @@ -85,7 +85,10 @@ class ResticBackupper(AbstractBackupper): def _password_command(self): return f"echo {LocalBackupSecret.get()}" - def restic_command(self, *args, tags: List[str] = []) -> List[str]: + def restic_command(self, *args, tags: Optional[List[str]] = None) -> List[str]: + if tags is None: + tags = [] + command = [ "restic", "-o", @@ -219,7 +222,7 @@ class ResticBackupper(AbstractBackupper): ) from error @staticmethod - def _snapshot_id_from_backup_messages(messages) -> Snapshot: + def _snapshot_id_from_backup_messages(messages) -> str: for message in messages: if message["message_type"] == "summary": # There is a discrepancy between versions of restic/rclone @@ -317,8 +320,8 @@ class ResticBackupper(AbstractBackupper): break if "unable" in line: raise ValueError(line) - except Exception as e: - raise ValueError("could not lock repository") from e + except Exception as error: + raise ValueError("could not lock repository") from error @unlocked_repo def restored_size(self, snapshot_id: str) -> int: @@ -415,6 +418,8 @@ class ResticBackupper(AbstractBackupper): forget_command = self.restic_command( "forget", snapshot_id, + # TODO: prune should be done in a separate process + "--prune", ) with subprocess.Popen( diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 86b92f3..ddfd176 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py 
@@ -193,11 +193,11 @@ class Storage: ) if quotas_model is None: unlimited_quotas = AutobackupQuotas( + last=-1, daily=-1, weekly=-1, monthly=-1, yearly=-1, - total=-1, ) return unlimited_quotas - return AutobackupQuotas.from_pydantic(quotas_model) + return AutobackupQuotas.from_pydantic(quotas_model) # pylint: disable=no-member diff --git a/selfprivacy_api/backup/time.py b/selfprivacy_api/backup/time.py deleted file mode 100644 index 9e34211..0000000 --- a/selfprivacy_api/backup/time.py +++ /dev/null @@ -1,29 +0,0 @@ -from datetime import datetime, timedelta, time - - -def same_day(a: datetime, b: datetime) -> bool: - return a.date() == b.date() - - -def same_week(a: datetime, b: datetime) -> bool: - # doing the hard way because weeks traverse the edges of years - zerobased_weekday = a.isoweekday() - 1 - start_of_day = datetime.combine(a.date(), time.min) - start_of_week = start_of_day - timedelta(days=zerobased_weekday) - end_of_week = start_of_week + timedelta(days=7) - - if b >= start_of_week and b <= end_of_week: - return True - return False - - -def same_month(a: datetime, b: datetime) -> bool: - return a.month == b.month and a.year == b.year - - -def same_year(a: datetime, b: datetime) -> bool: - return a.year == b.year - - -def same_lifetime_of_the_universe(a: datetime, b: datetime) -> bool: - return True diff --git a/selfprivacy_api/graphql/common_types/backup.py b/selfprivacy_api/graphql/common_types/backup.py index cc03936..953009d 100644 --- a/selfprivacy_api/graphql/common_types/backup.py +++ b/selfprivacy_api/graphql/common_types/backup.py @@ -1,7 +1,7 @@ """Backup""" # pylint: disable=too-few-public-methods -import strawberry from enum import Enum +import strawberry from pydantic import BaseModel @@ -19,11 +19,11 @@ class BackupReason(Enum): class _AutobackupQuotas(BaseModel): + last: int daily: int weekly: int monthly: int yearly: int - total: int @strawberry.experimental.pydantic.type(model=_AutobackupQuotas, all_fields=True) diff --git 
a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index e8de4a1..14410e3 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -58,11 +58,11 @@ mutation TestAutobackupQuotas($input: AutobackupQuotasInput!) { locationName locationId autobackupQuotas { + last daily weekly monthly yearly - total } } } @@ -368,11 +368,11 @@ def test_remove(authorized_client, generic_userdata): def test_autobackup_quotas_nonzero(authorized_client): quotas = _AutobackupQuotas( + last=3, daily=2, weekly=4, monthly=13, yearly=14, - total=3, ) response = api_set_quotas(authorized_client, quotas) data = get_data(response)["backup"]["setAutobackupQuotas"] diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 5daae0c..edef6d0 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -305,11 +305,19 @@ def test_backup_reasons(backups, dummy_service): unlimited_quotas = AutobackupQuotas( + last=-1, daily=-1, weekly=-1, monthly=-1, yearly=-1, - total=-1, +) + +zero_quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, ) @@ -321,20 +329,66 @@ def test_get_empty_quotas(backups): def test_set_quotas(backups): quotas = AutobackupQuotas( + last=3, daily=2343, weekly=343, monthly=0, yearly=-34556, - total=563, ) Backups.set_autobackup_quotas(quotas) assert Backups.autobackup_quotas() == AutobackupQuotas( + last=3, daily=2343, weekly=343, + monthly=0, + yearly=-1, + ) + + +def test_set_zero_quotas(backups): + quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == zero_quotas + + +def test_set_unlimited_quotas(backups): + quotas = AutobackupQuotas( + last=-1, + daily=-1, + weekly=-1, monthly=-1, yearly=-1, - total=563, ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == unlimited_quotas + + 
+def test_set_zero_quotas_after_unlimited(backups): + quotas = AutobackupQuotas( + last=-1, + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == unlimited_quotas + + quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == zero_quotas def dummy_snapshot(date: datetime): @@ -351,15 +405,24 @@ def test_autobackup_snapshots_pruning(backups): now = datetime(year=2023, month=1, day=25, hour=10) snaps = [ - dummy_snapshot(now - timedelta(days=365 * 2)), - dummy_snapshot(now - timedelta(days=20)), - dummy_snapshot(now - timedelta(days=2)), - dummy_snapshot(now - timedelta(days=1, hours=3)), - dummy_snapshot(now - timedelta(days=1, hours=2)), - dummy_snapshot(now - timedelta(days=1)), - dummy_snapshot(now - timedelta(hours=2)), - dummy_snapshot(now - timedelta(minutes=5)), dummy_snapshot(now), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(hours=5)), + dummy_snapshot(now - timedelta(days=1)), + dummy_snapshot(now - timedelta(days=1, hours=2)), + dummy_snapshot(now - timedelta(days=1, hours=3)), + dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=7)), + dummy_snapshot(now - timedelta(days=12)), + dummy_snapshot(now - timedelta(days=23)), + dummy_snapshot(now - timedelta(days=28)), + dummy_snapshot(now - timedelta(days=32)), + dummy_snapshot(now - timedelta(days=47)), + dummy_snapshot(now - timedelta(days=64)), + dummy_snapshot(now - timedelta(days=84)), + dummy_snapshot(now - timedelta(days=104)), + dummy_snapshot(now - timedelta(days=365 * 2)), ] old_len = len(snaps) @@ -367,135 +430,190 @@ def test_autobackup_snapshots_pruning(backups): Backups.set_autobackup_quotas(quotas) assert Backups._prune_snaps_with_quotas(snaps) == snaps - quotas = copy(unlimited_quotas) + quotas = 
copy(zero_quotas) + quotas.last = 2 quotas.daily = 2 Backups.set_autobackup_quotas(quotas) - pruned_snaps = Backups._prune_snaps_with_quotas(snaps) - assert pruned_snaps == [ - dummy_snapshot(now - timedelta(days=365 * 2)), - dummy_snapshot(now - timedelta(days=20)), - dummy_snapshot(now - timedelta(days=2)), - dummy_snapshot(now - timedelta(days=1, hours=2)), - dummy_snapshot(now - timedelta(days=1)), - dummy_snapshot(now - timedelta(minutes=5)), + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ dummy_snapshot(now), + dummy_snapshot(now - timedelta(minutes=5)), + # dummy_snapshot(now - timedelta(hours=2)), + # dummy_snapshot(now - timedelta(hours=5)), + dummy_snapshot(now - timedelta(days=1)), + # dummy_snapshot(now - timedelta(days=1, hours=2)), + # dummy_snapshot(now - timedelta(days=1, hours=3)), + # dummy_snapshot(now - timedelta(days=2)), + # dummy_snapshot(now - timedelta(days=7)), + # dummy_snapshot(now - timedelta(days=12)), + # dummy_snapshot(now - timedelta(days=23)), + # dummy_snapshot(now - timedelta(days=28)), + # dummy_snapshot(now - timedelta(days=32)), + # dummy_snapshot(now - timedelta(days=47)), + # dummy_snapshot(now - timedelta(days=64)), + # dummy_snapshot(now - timedelta(days=84)), + # dummy_snapshot(now - timedelta(days=104)), + # dummy_snapshot(now - timedelta(days=365 * 2)), ] # checking that this function does not mutate the argument - assert snaps != pruned_snaps + assert snaps != snaps_to_keep assert len(snaps) == old_len - quotas = copy(unlimited_quotas) + quotas = copy(zero_quotas) quotas.weekly = 4 Backups.set_autobackup_quotas(quotas) - pruned_snaps = Backups._prune_snaps_with_quotas(snaps) - assert pruned_snaps == [ - dummy_snapshot(now - timedelta(days=365 * 2)), - dummy_snapshot(now - timedelta(days=20)), - dummy_snapshot(now - timedelta(days=1)), - dummy_snapshot(now - timedelta(hours=2)), - dummy_snapshot(now - timedelta(minutes=5)), + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + 
assert snaps_to_keep == [ dummy_snapshot(now), + # dummy_snapshot(now - timedelta(minutes=5)), + # dummy_snapshot(now - timedelta(hours=2)), + # dummy_snapshot(now - timedelta(hours=5)), + # dummy_snapshot(now - timedelta(days=1)), + # dummy_snapshot(now - timedelta(days=1, hours=2)), + # dummy_snapshot(now - timedelta(days=1, hours=3)), + # dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=7)), + dummy_snapshot(now - timedelta(days=12)), + dummy_snapshot(now - timedelta(days=23)), + # dummy_snapshot(now - timedelta(days=28)), + # dummy_snapshot(now - timedelta(days=32)), + # dummy_snapshot(now - timedelta(days=47)), + # dummy_snapshot(now - timedelta(days=64)), + # dummy_snapshot(now - timedelta(days=84)), + # dummy_snapshot(now - timedelta(days=104)), + # dummy_snapshot(now - timedelta(days=365 * 2)), ] - quotas = copy(unlimited_quotas) + quotas = copy(zero_quotas) quotas.monthly = 7 Backups.set_autobackup_quotas(quotas) - pruned_snaps = Backups._prune_snaps_with_quotas(snaps) - assert pruned_snaps == [ - dummy_snapshot(now - timedelta(days=365 * 2)), - dummy_snapshot(now - timedelta(days=2)), - dummy_snapshot(now - timedelta(days=1, hours=3)), - dummy_snapshot(now - timedelta(days=1, hours=2)), - dummy_snapshot(now - timedelta(days=1)), - dummy_snapshot(now - timedelta(hours=2)), - dummy_snapshot(now - timedelta(minutes=5)), + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ dummy_snapshot(now), + # dummy_snapshot(now - timedelta(minutes=5)), + # dummy_snapshot(now - timedelta(hours=2)), + # dummy_snapshot(now - timedelta(hours=5)), + # dummy_snapshot(now - timedelta(days=1)), + # dummy_snapshot(now - timedelta(days=1, hours=2)), + # dummy_snapshot(now - timedelta(days=1, hours=3)), + # dummy_snapshot(now - timedelta(days=2)), + # dummy_snapshot(now - timedelta(days=7)), + # dummy_snapshot(now - timedelta(days=12)), + # dummy_snapshot(now - timedelta(days=23)), + dummy_snapshot(now - 
timedelta(days=28)), + # dummy_snapshot(now - timedelta(days=32)), + # dummy_snapshot(now - timedelta(days=47)), + dummy_snapshot(now - timedelta(days=64)), + # dummy_snapshot(now - timedelta(days=84)), + dummy_snapshot(now - timedelta(days=104)), + dummy_snapshot(now - timedelta(days=365 * 2)), ] def test_autobackup_snapshots_pruning_yearly(backups): snaps = [ - dummy_snapshot(datetime(year=2023, month=2, day=1)), - dummy_snapshot(datetime(year=2023, month=3, day=1)), - dummy_snapshot(datetime(year=2023, month=4, day=1)), dummy_snapshot(datetime(year=2055, month=3, day=1)), + dummy_snapshot(datetime(year=2055, month=2, day=1)), + dummy_snapshot(datetime(year=2023, month=4, day=1)), + dummy_snapshot(datetime(year=2023, month=3, day=1)), + dummy_snapshot(datetime(year=2023, month=2, day=1)), + dummy_snapshot(datetime(year=2021, month=2, day=1)), ] - quotas = copy(unlimited_quotas) + quotas = copy(zero_quotas) quotas.yearly = 2 Backups.set_autobackup_quotas(quotas) - pruned_snaps = Backups._prune_snaps_with_quotas(snaps) - assert pruned_snaps == [ - dummy_snapshot(datetime(year=2023, month=3, day=1)), - dummy_snapshot(datetime(year=2023, month=4, day=1)), + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ dummy_snapshot(datetime(year=2055, month=3, day=1)), + dummy_snapshot(datetime(year=2023, month=4, day=1)), ] def test_autobackup_snapshots_pruning_bottleneck(backups): now = datetime(year=2023, month=1, day=25, hour=10) snaps = [ - dummy_snapshot(now - timedelta(hours=4)), - dummy_snapshot(now - timedelta(hours=3)), - dummy_snapshot(now - timedelta(hours=2)), - dummy_snapshot(now - timedelta(minutes=5)), dummy_snapshot(now), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(hours=3)), + dummy_snapshot(now - timedelta(hours=4)), ] - yearly_quota = copy(unlimited_quotas) + yearly_quota = copy(zero_quotas) yearly_quota.yearly = 2 - monthly_quota = 
copy(unlimited_quotas) + monthly_quota = copy(zero_quotas) monthly_quota.monthly = 2 - weekly_quota = copy(unlimited_quotas) + weekly_quota = copy(zero_quotas) weekly_quota.weekly = 2 - daily_quota = copy(unlimited_quotas) + daily_quota = copy(zero_quotas) daily_quota.daily = 2 - total_quota = copy(unlimited_quotas) - total_quota.total = 2 + last_quota = copy(zero_quotas) + last_quota.last = 1 + last_quota.yearly = 2 - for quota in [total_quota, yearly_quota, monthly_quota, weekly_quota, daily_quota]: + for quota in [last_quota, yearly_quota, monthly_quota, weekly_quota, daily_quota]: + print(quota) Backups.set_autobackup_quotas(quota) - pruned_snaps = Backups._prune_snaps_with_quotas(snaps) - assert pruned_snaps == [ - dummy_snapshot(now - timedelta(minutes=5)), + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ dummy_snapshot(now), + # If there is a vacant quota, we should keep the last snapshot even if it doesn't fit + dummy_snapshot(now - timedelta(hours=4)), ] def test_autobackup_snapshots_pruning_edgeweek(backups): # jan 1 2023 is Sunday snaps = [ - dummy_snapshot(datetime(year=2022, month=12, day=30)), - dummy_snapshot(datetime(year=2022, month=12, day=31)), - dummy_snapshot(datetime(year=2023, month=1, day=1)), dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2023, month=1, day=1)), + dummy_snapshot(datetime(year=2022, month=12, day=31)), + dummy_snapshot(datetime(year=2022, month=12, day=30)), ] - quotas = copy(unlimited_quotas) + quotas = copy(zero_quotas) quotas.weekly = 2 Backups.set_autobackup_quotas(quotas) - pruned_snaps = Backups._prune_snaps_with_quotas(snaps) - assert pruned_snaps == [ - dummy_snapshot(datetime(year=2022, month=12, day=31)), - dummy_snapshot(datetime(year=2023, month=1, day=1)), + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2023, month=1, 
day=1)), + ] + + +def test_autobackup_snapshots_pruning_big_gap(backups): + snaps = [ + dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2023, month=1, day=2)), + dummy_snapshot(datetime(year=2022, month=10, day=31)), + dummy_snapshot(datetime(year=2022, month=10, day=30)), + ] + quotas = copy(zero_quotas) + quotas.weekly = 2 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2022, month=10, day=31)), ] def test_too_many_auto(backups, dummy_service): assert Backups.autobackup_quotas() - quota = copy(unlimited_quotas) - quota.total = 2 + quota = copy(zero_quotas) + quota.last = 2 Backups.set_autobackup_quotas(quota) - assert Backups.autobackup_quotas().total == 2 + assert Backups.autobackup_quotas().last == 2 snap = Backups.back_up(dummy_service, BackupReason.AUTO) assert len(Backups.get_snapshots(dummy_service)) == 1 @@ -509,7 +627,7 @@ def test_too_many_auto(backups, dummy_service): assert snap3 in snaps assert snap not in snaps - quota.total = -1 + quota.last = -1 Backups.set_autobackup_quotas(quota) snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) @@ -518,7 +636,7 @@ def test_too_many_auto(backups, dummy_service): assert snap4 in snaps # Retroactivity - quota.total = 1 + quota.last = 1 Backups.set_autobackup_quotas(quota) snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 From 450a998ea638fc572027fc8433326ceaa496fc62 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Sat, 9 Sep 2023 03:32:57 +0300 Subject: [PATCH 080/246] chore:bump version --- selfprivacy_api/dependencies.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index fb974e8..9e144fd 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ 
async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.3.0" + return "2.4.0" diff --git a/setup.py b/setup.py index 684f54f..5ce3947 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.3.0", + version="2.4.0", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", From 8b840d4c2cff45845b01a9b654654eb620f9333f Mon Sep 17 00:00:00 2001 From: Inex Code Date: Sat, 9 Sep 2023 09:52:20 +0300 Subject: [PATCH 081/246] fix(backups): expose snapshot reason and set the default value --- selfprivacy_api/backup/__init__.py | 22 +++++++++---------- selfprivacy_api/dependencies.py | 2 +- .../graphql/common_types/service.py | 2 ++ selfprivacy_api/graphql/queries/backup.py | 1 + selfprivacy_api/models/backup/snapshot.py | 2 +- setup.py | 2 +- tests/test_graphql/test_api_backup.py | 1 + 7 files changed, 18 insertions(+), 14 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 265ee0f..72d1567 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -335,27 +335,27 @@ class Backups: buckets: list[RotationBucket] = [ RotationBucket( - quotas.last, + quotas.last, # type: ignore -1, lambda _, index: index, ), RotationBucket( - quotas.daily, + quotas.daily, # type: ignore -1, lambda date, _: date.year * 10000 + date.month * 100 + date.day, ), RotationBucket( - quotas.weekly, + quotas.weekly, # type: ignore -1, lambda date, _: date.year * 100 + date.isocalendar()[1], ), RotationBucket( - quotas.monthly, + quotas.monthly, # type: ignore -1, lambda date, _: date.year * 100 + date.month, ), RotationBucket( - quotas.yearly, + quotas.yearly, # type: ignore -1, lambda date, _: date.year, ), @@ -409,11 +409,11 @@ class Backups: Storage.set_autobackup_quotas( AutobackupQuotas( - last=Backups._standardize_quotas(quotas.last), - daily=Backups._standardize_quotas(quotas.daily), - 
weekly=Backups._standardize_quotas(quotas.weekly), - monthly=Backups._standardize_quotas(quotas.monthly), - yearly=Backups._standardize_quotas(quotas.yearly), + last=Backups._standardize_quotas(quotas.last), # type: ignore + daily=Backups._standardize_quotas(quotas.daily), # type: ignore + weekly=Backups._standardize_quotas(quotas.weekly), # type: ignore + monthly=Backups._standardize_quotas(quotas.monthly), # type: ignore + yearly=Backups._standardize_quotas(quotas.yearly), # type: ignore ) ) @@ -438,7 +438,7 @@ class Backups: job: Job, ) -> None: Jobs.update( - job, status=JobStatus.CREATED, status_text=f"Waiting for pre-restore backup" + job, status=JobStatus.CREATED, status_text="Waiting for pre-restore backup" ) failsafe_snapshot = Backups.back_up(service, BackupReason.PRE_RESTORE) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index 9e144fd..35cf9e1 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.4.0" + return "2.4.1" diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 836a3df..319ce3e 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -2,6 +2,7 @@ from enum import Enum import typing import strawberry import datetime +from selfprivacy_api.graphql.common_types.backup import BackupReason from selfprivacy_api.graphql.common_types.dns import DnsRecord from selfprivacy_api.services import get_service_by_id, get_services_by_location @@ -114,6 +115,7 @@ class SnapshotInfo: id: str service: Service created_at: datetime.datetime + reason: BackupReason def service_to_graphql_service(service: ServiceInterface) -> Service: diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 6d47a8c..fc5f78a 100644 --- 
a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -77,6 +77,7 @@ class Backup: id=snap.id, service=service, created_at=snap.created_at, + reason=snap.reason, ) result.append(graphql_snap) return result diff --git a/selfprivacy_api/models/backup/snapshot.py b/selfprivacy_api/models/backup/snapshot.py index 28ad661..b2831e7 100644 --- a/selfprivacy_api/models/backup/snapshot.py +++ b/selfprivacy_api/models/backup/snapshot.py @@ -8,4 +8,4 @@ class Snapshot(BaseModel): id: str service_name: str created_at: datetime.datetime - reason: BackupReason + reason: BackupReason = BackupReason.EXPLICIT diff --git a/setup.py b/setup.py index 5ce3947..44d2336 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.4.0", + version="2.4.1", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 14410e3..225abf7 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -145,6 +145,7 @@ allSnapshots { id } createdAt + reason } """ From 62d5de0dd63ed1afa3488276f50e5010ff51787e Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 3 Oct 2023 16:41:09 +0300 Subject: [PATCH 082/246] refactor(ssh): Remove unused add_root_ssh_key function --- selfprivacy_api/actions/ssh.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/selfprivacy_api/actions/ssh.py b/selfprivacy_api/actions/ssh.py index 3f79ff8..8a92735 100644 --- a/selfprivacy_api/actions/ssh.py +++ b/selfprivacy_api/actions/ssh.py @@ -49,19 +49,6 @@ def set_ssh_settings( data["ssh"]["passwordAuthentication"] = password_authentication -def add_root_ssh_key(public_key: str): - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - # Return 409 if key already in array - 
for key in data["ssh"]["rootKeys"]: - if key == public_key: - raise KeyAlreadyExists() - data["ssh"]["rootKeys"].append(public_key) - - class KeyAlreadyExists(Exception): """Key already exists""" From cebb71ff4a5851864f66699a679d5875a3142154 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 3 Oct 2023 16:51:06 +0300 Subject: [PATCH 083/246] feat(ssh): Add support for ecdsa keys --- selfprivacy_api/graphql/mutations/users_mutations.py | 2 +- selfprivacy_api/utils/__init__.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/users_mutations.py b/selfprivacy_api/graphql/mutations/users_mutations.py index f7317fb..57825bc 100644 --- a/selfprivacy_api/graphql/mutations/users_mutations.py +++ b/selfprivacy_api/graphql/mutations/users_mutations.py @@ -147,7 +147,7 @@ class UsersMutations: except InvalidPublicKey: return UserMutationReturn( success=False, - message="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", + message="Invalid key type. Only ssh-ed25519, ssh-rsa and ecdsa are supported", code=400, ) except UserNotFound: diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 96bf9d8..40ed5b6 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -88,10 +88,12 @@ class ReadUserData(object): def validate_ssh_public_key(key): - """Validate SSH public key. It may be ssh-ed25519 or ssh-rsa.""" + """Validate SSH public key. 
+ It may be ssh-ed25519, ssh-rsa or ecdsa-sha2-nistp256.""" if not key.startswith("ssh-ed25519"): if not key.startswith("ssh-rsa"): - return False + if not key.startswith("ecdsa-sha2-nistp256"): + return False return True From 07aaa21602e9afb0088b06ba17f973b30d49cb5d Mon Sep 17 00:00:00 2001 From: Inex Code Date: Mon, 9 Oct 2023 12:45:22 +0300 Subject: [PATCH 084/246] chore: bump version --- selfprivacy_api/dependencies.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index 35cf9e1..1955601 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.4.1" + return "2.4.2" diff --git a/setup.py b/setup.py index 44d2336..399b157 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.4.1", + version="2.4.2", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", From 0912ac183198f7882d60dafd1d3d1333043752d9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Sep 2023 17:56:04 +0000 Subject: [PATCH 085/246] feature(jobs): set ttl via method --- selfprivacy_api/jobs/__init__.py | 8 ++++++++ tests/test_jobs.py | 6 ++++++ 2 files changed, 14 insertions(+) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 3fe452b..05b5ab8 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -224,6 +224,14 @@ class Jobs: return job + @staticmethod + def set_expiration(job: Job, expiration_seconds: int) -> Job: + redis = RedisPool().get_connection() + key = _redis_key_from_uuid(job.uid) + if redis.exists(key): + redis.expire(key, expiration_seconds) + return job + @staticmethod def get_job(uid: str) -> typing.Optional[Job]: """ diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 
0a4271e..c0e2125 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -49,6 +49,12 @@ def test_remove_get_nonexistent(jobs_with_one_job): assert jobs_with_one_job.get_job(uid_str) is None +def test_set_zeroing_ttl(jobs_with_one_job): + test_job = jobs_with_one_job.get_jobs()[0] + jobs_with_one_job.set_expiration(test_job, 0) + assert jobs_with_one_job.get_jobs() == [] + + def test_jobs(jobs_with_one_job): jobs = jobs_with_one_job test_job = jobs_with_one_job.get_jobs()[0] From 26c0a8fafe3a76fddffedcbf2c5b8f6c21a78020 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Sep 2023 18:09:39 +0000 Subject: [PATCH 086/246] feature(jobs): set 1 hour ttl for successful autobackup jobs --- selfprivacy_api/backup/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 72d1567..aa11f7f 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -56,6 +56,8 @@ BACKUP_PROVIDER_ENVS = { "location": "BACKUP_LOCATION", } +AUTOBACKUP_JOB_EXPIRATION_SECONDS = 60 * 60 # one hour + class NotDeadError(AssertionError): """ @@ -316,6 +318,8 @@ class Backups: raise error Jobs.update(job, status=JobStatus.FINISHED) + if reason in [BackupReason.AUTO, BackupReason.PRE_RESTORE]: + Jobs.set_expiration(job, AUTOBACKUP_JOB_EXPIRATION_SECONDS) return snapshot @staticmethod From ece3258c7858ebf42c08d339dbd2320152acab26 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Sep 2023 18:13:22 +0000 Subject: [PATCH 087/246] test(jobs): test out setting ttl --- tests/test_jobs.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_jobs.py b/tests/test_jobs.py index c0e2125..64cf457 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -1,6 +1,7 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument import pytest +from time import sleep from selfprivacy_api.jobs import Jobs, JobStatus import selfprivacy_api.jobs as jobsmodule @@ 
-55,6 +56,14 @@ def test_set_zeroing_ttl(jobs_with_one_job): assert jobs_with_one_job.get_jobs() == [] +def test_not_zeroing_ttl(jobs_with_one_job): + test_job = jobs_with_one_job.get_jobs()[0] + jobs_with_one_job.set_expiration(test_job, 1) + assert len(jobs_with_one_job.get_jobs()) == 1 + sleep(1.2) + assert len(jobs_with_one_job.get_jobs()) == 0 + + def test_jobs(jobs_with_one_job): jobs = jobs_with_one_job test_job = jobs_with_one_job.get_jobs()[0] From b7cd703eaafa9e01a3a1716f2a4f3dd0c61da2ad Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 25 Oct 2023 14:53:11 +0000 Subject: [PATCH 088/246] fix(tokens): missing timezone import --- selfprivacy_api/repositories/tokens/redis_tokens_repository.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 944c9b9..834794c 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -4,6 +4,7 @@ Token repository using Redis as backend. 
from typing import Any, Optional from datetime import datetime from hashlib import md5 +from datetime import timezone from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, @@ -53,6 +54,7 @@ class RedisTokensRepository(AbstractTokensRepository): token = self._token_from_hash(key) if token == input_token: return key + return None def delete_token(self, input_token: Token) -> None: """Delete the token""" @@ -148,6 +150,7 @@ class RedisTokensRepository(AbstractTokensRepository): if token is not None: token.created_at = token.created_at.replace(tzinfo=None) return token + return None def _recovery_key_from_hash(self, redis_key: str) -> Optional[RecoveryKey]: return self._hash_as_model(redis_key, RecoveryKey) From 3deaeb28c59a68aa186bfdece5f466068cf87be5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 1 Nov 2023 15:29:21 +0000 Subject: [PATCH 089/246] test(auth): fix assert_ok's wrt nested structure --- tests/test_graphql/api_common.py | 89 +++++++++++++++++++++++++ tests/test_graphql/test_api_devices.py | 8 +-- tests/test_graphql/test_api_recovery.py | 16 ++--- 3 files changed, 101 insertions(+), 12 deletions(-) create mode 100644 tests/test_graphql/api_common.py diff --git a/tests/test_graphql/api_common.py b/tests/test_graphql/api_common.py new file mode 100644 index 0000000..bfac767 --- /dev/null +++ b/tests/test_graphql/api_common.py @@ -0,0 +1,89 @@ +from tests.common import generate_api_query +from tests.conftest import TOKENS_FILE_CONTENTS, DEVICE_WE_AUTH_TESTS_WITH + +ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] + + +def assert_ok(response, request): + data = assert_data(response) + data[request]["success"] is True + data[request]["message"] is not None + data[request]["code"] == 200 + + +def assert_errorcode(response, request, code): + data = assert_data(response) + data[request]["success"] is False + data[request]["message"] is not None + data[request]["code"] == code + + +def assert_empty(response): + 
assert response.status_code == 200 + assert response.json().get("data") is None + + +def assert_data(response): + assert response.status_code == 200 + data = response.json().get("data") + assert data is not None + assert "api" in data.keys() + return data["api"] + + +API_DEVICES_QUERY = """ +devices { + creationDate + isCaller + name +} +""" + + +def request_devices(client): + return client.post( + "/graphql", + json={"query": generate_api_query([API_DEVICES_QUERY])}, + ) + + +def graphql_get_devices(client): + response = request_devices(client) + data = assert_data(response) + devices = data["devices"] + assert devices is not None + return devices + + +def set_client_token(client, token): + client.headers.update({"Authorization": "Bearer " + token}) + + +def assert_token_valid(client, token): + set_client_token(client, token) + assert graphql_get_devices(client) is not None + + +def assert_same(graphql_devices, abstract_devices): + """Orderless comparison""" + assert len(graphql_devices) == len(abstract_devices) + for original_device in abstract_devices: + assert original_device["name"] in [device["name"] for device in graphql_devices] + for device in graphql_devices: + if device["name"] == original_device["name"]: + assert device["creationDate"] == original_device["date"].isoformat() + + +def assert_original(client): + devices = graphql_get_devices(client) + assert_original_devices(devices) + + +def assert_original_devices(devices): + assert_same(devices, ORIGINAL_DEVICES) + + for device in devices: + if device["name"] == DEVICE_WE_AUTH_TESTS_WITH["name"]: + assert device["isCaller"] is True + else: + assert device["isCaller"] is False diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index 599fe24..b24bc7f 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -8,7 +8,7 @@ from tests.common import ( generate_api_query, ) from tests.conftest import DEVICE_WE_AUTH_TESTS_WITH, 
TOKENS_FILE_CONTENTS -from tests.test_graphql.common import ( +from tests.test_graphql.api_common import ( assert_data, assert_empty, assert_ok, @@ -38,7 +38,7 @@ def graphql_get_new_device_key(authorized_client) -> str: ) assert_ok(response, "getNewDeviceApiKey") - key = response.json()["data"]["getNewDeviceApiKey"]["key"] + key = response.json()["data"]["api"]["getNewDeviceApiKey"]["key"] assert key.split(" ").__len__() == 12 return key @@ -61,7 +61,7 @@ def graphql_try_auth_new_device(client, mnemonic_key, device_name): def graphql_authorize_new_device(client, mnemonic_key, device_name) -> str: response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") assert_ok(response, "authorizeWithNewDeviceApiKey") - token = response.json()["data"]["authorizeWithNewDeviceApiKey"]["token"] + token = response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["token"] assert_token_valid(client, token) @@ -182,7 +182,7 @@ def test_graphql_refresh_token(authorized_client, client, tokens_file): ) assert_ok(response, "refreshDeviceApiToken") - new_token = response.json()["data"]["refreshDeviceApiToken"]["token"] + new_token = response.json()["data"]["api"]["refreshDeviceApiToken"]["token"] assert_token_valid(client, new_token) set_client_token(client, new_token) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index ec5f094..e847b16 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -13,7 +13,7 @@ from tests.common import ( from tests.common import FIVE_MINUTES_INTO_FUTURE_NAIVE as FIVE_MINUTES_INTO_FUTURE from tests.common import FIVE_MINUTES_INTO_PAST_NAIVE as FIVE_MINUTES_INTO_PAST -from tests.test_graphql.common import ( +from tests.test_graphql.api_common import ( assert_empty, assert_data, assert_ok, @@ -46,7 +46,7 @@ def graphql_recovery_status(client): response = request_recovery_status(client) data = assert_data(response) - status = 
data["api"]["recoveryKey"] + status = data["recoveryKey"] assert status is not None return status @@ -70,7 +70,7 @@ def request_make_new_recovery_key(client, expires_at=None, uses=None): def graphql_make_new_recovery_key(client, expires_at=None, uses=None): response = request_make_new_recovery_key(client, expires_at, uses) assert_ok(response, "getNewRecoveryApiKey") - key = response.json()["data"]["getNewRecoveryApiKey"]["key"] + key = response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] assert key is not None assert key.split(" ").__len__() == 18 return key @@ -94,7 +94,7 @@ def request_recovery_auth(client, key, device_name): def graphql_use_recovery_key(client, key, device_name): response = request_recovery_auth(client, key, device_name) assert_ok(response, "useRecoveryApiKey") - token = response.json()["data"]["useRecoveryApiKey"]["token"] + token = response.json()["data"]["api"]["useRecoveryApiKey"]["token"] assert token is not None assert_token_valid(client, token) set_client_token(client, token) @@ -187,7 +187,7 @@ def test_graphql_use_recovery_key_after_expiration( response = request_recovery_auth(client, key, "new_test_token3") assert_errorcode(response, "useRecoveryApiKey", 404) - assert response.json()["data"]["useRecoveryApiKey"]["token"] is None + assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is None assert_original(authorized_client) status = graphql_recovery_status(authorized_client) @@ -207,7 +207,7 @@ def test_graphql_generate_recovery_key_with_expiration_in_the_past( ) assert_errorcode(response, "getNewRecoveryApiKey", 400) - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None assert graphql_recovery_status(authorized_client)["exists"] is False @@ -273,12 +273,12 @@ def test_graphql_generate_recovery_key_with_negative_uses( response = request_make_new_recovery_key(authorized_client, uses=-1) assert_errorcode(response, 
"getNewRecoveryApiKey", 400) - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): response = request_make_new_recovery_key(authorized_client, uses=0) assert_errorcode(response, "getNewRecoveryApiKey", 400) - assert response.json()["data"]["getNewRecoveryApiKey"]["key"] is None + assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None assert graphql_recovery_status(authorized_client)["exists"] is False From a66ee2d3e565bbf815facda52a01d73d5504ae9b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 1 Nov 2023 16:46:36 +0000 Subject: [PATCH 090/246] test(auth): fix future expiring too fast --- tests/common.py | 19 +++++++++++++++---- tests/test_graphql/test_api_recovery.py | 10 +++++----- .../test_repository/test_tokens_repository.py | 8 ++++---- tests/test_rest_endpoints/test_auth.py | 10 ++++++---- 4 files changed, 30 insertions(+), 17 deletions(-) diff --git a/tests/common.py b/tests/common.py index df95474..97d0d7a 100644 --- a/tests/common.py +++ b/tests/common.py @@ -6,10 +6,21 @@ from mnemonic import Mnemonic RECOVERY_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.time.datetime" DEVICE_KEY_VALIDATION_DATETIME = RECOVERY_KEY_VALIDATION_DATETIME -FIVE_MINUTES_INTO_FUTURE_NAIVE = datetime.now() + timedelta(minutes=5) -FIVE_MINUTES_INTO_FUTURE = datetime.now(timezone.utc) + timedelta(minutes=5) -FIVE_MINUTES_INTO_PAST_NAIVE = datetime.now() - timedelta(minutes=5) -FIVE_MINUTES_INTO_PAST = datetime.now(timezone.utc) - timedelta(minutes=5) + +def five_minutes_into_future_naive(): + return datetime.now() + timedelta(minutes=5) + + +def five_minutes_into_future(): + return datetime.now(timezone.utc) + timedelta(minutes=5) + + +def five_minutes_into_past_naive(): + return datetime.now() - timedelta(minutes=5) + + +def five_minutes_into_past(): + return 
datetime.now(timezone.utc) - timedelta(minutes=5) class NearFuture(datetime): diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index e847b16..19f8a3d 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -10,8 +10,8 @@ from tests.common import ( ) # Graphql API's output should be timezone-naive -from tests.common import FIVE_MINUTES_INTO_FUTURE_NAIVE as FIVE_MINUTES_INTO_FUTURE -from tests.common import FIVE_MINUTES_INTO_PAST_NAIVE as FIVE_MINUTES_INTO_PAST +from tests.common import five_minutes_into_future_naive as five_minutes_into_future +from tests.common import five_minutes_into_past_naive as five_minutes_into_past from tests.test_graphql.api_common import ( assert_empty, @@ -161,7 +161,7 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): def test_graphql_generate_recovery_key_with_expiration_date( client, authorized_client, tokens_file ): - expiration_date = FIVE_MINUTES_INTO_FUTURE + expiration_date = five_minutes_into_future() key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date) status = graphql_recovery_status(authorized_client) @@ -179,7 +179,7 @@ def test_graphql_generate_recovery_key_with_expiration_date( def test_graphql_use_recovery_key_after_expiration( client, authorized_client, tokens_file, mocker ): - expiration_date = FIVE_MINUTES_INTO_FUTURE + expiration_date = five_minutes_into_future() key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date) # Timewarp to after it expires @@ -201,7 +201,7 @@ def test_graphql_use_recovery_key_after_expiration( def test_graphql_generate_recovery_key_with_expiration_in_the_past( authorized_client, tokens_file ): - expiration_date = FIVE_MINUTES_INTO_PAST + expiration_date = five_minutes_into_past() response = request_make_new_recovery_key( authorized_client, expires_at=expiration_date ) diff --git 
a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_graphql/test_repository/test_tokens_repository.py index 360bfa5..eb5e7cb 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_graphql/test_repository/test_tokens_repository.py @@ -27,7 +27,7 @@ from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, ) -from tests.common import FIVE_MINUTES_INTO_PAST, FIVE_MINUTES_INTO_FUTURE +from tests.common import five_minutes_into_past, five_minutes_into_future ORIGINAL_DEVICE_NAMES = [ @@ -363,7 +363,7 @@ def test_use_mnemonic_expired_recovery_key( some_tokens_repo, ): repo = some_tokens_repo - expiration = FIVE_MINUTES_INTO_PAST + expiration = five_minutes_into_past() assert repo.create_recovery_key(uses_left=2, expiration=expiration) is not None recovery_key = repo.get_recovery_key() # TODO: do not ignore timezone once json backend is deleted @@ -543,7 +543,7 @@ def test_use_mnemonic_expired_new_device_key( some_tokens_repo, ): repo = some_tokens_repo - expiration = FIVE_MINUTES_INTO_PAST + expiration = five_minutes_into_past() key = repo.get_new_device_key() assert key is not None @@ -601,5 +601,5 @@ def test_clone_json_to_redis_full(some_tokens_repo): repo = some_tokens_repo if isinstance(repo, JsonTokensRepository): repo.get_new_device_key() - repo.create_recovery_key(FIVE_MINUTES_INTO_FUTURE, 2) + repo.create_recovery_key(five_minutes_into_future(), 2) clone_to_redis(repo) diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index ba54745..d62fa18 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -11,8 +11,8 @@ from tests.common import ( NearFuture, assert_recovery_recent, ) -from tests.common import FIVE_MINUTES_INTO_FUTURE_NAIVE as FIVE_MINUTES_INTO_FUTURE -from tests.common import FIVE_MINUTES_INTO_PAST_NAIVE as FIVE_MINUTES_INTO_PAST +from tests.common import 
five_minutes_into_future_naive as five_minutes_into_future +from tests.common import five_minutes_into_past_naive as five_minutes_into_past DATE_FORMATS = [ "%Y-%m-%dT%H:%M:%S.%fZ", @@ -76,6 +76,8 @@ def rest_make_recovery_token(client, expires_at=None, timeformat=None, uses=None json=json, ) + if not response.status_code == 200: + raise ValueError(response.reason, response.text, response.json()["detail"]) assert response.status_code == 200 assert "token" in response.json() return response.json()["token"] @@ -323,7 +325,7 @@ def test_generate_recovery_token_with_expiration_date( ): # Generate token with expiration date # Generate expiration date in the future - expiration_date = FIVE_MINUTES_INTO_FUTURE + expiration_date = five_minutes_into_future() mnemonic_token = rest_make_recovery_token( authorized_client, expires_at=expiration_date, timeformat=timeformat ) @@ -362,7 +364,7 @@ def test_generate_recovery_token_with_expiration_in_the_past( authorized_client, tokens_file, timeformat ): # Server must return 400 if expiration date is in the past - expiration_date = FIVE_MINUTES_INTO_PAST + expiration_date = five_minutes_into_past() expiration_date_str = expiration_date.strftime(timeformat) response = authorized_client.post( "/auth/recovery_token", From 8caf7e1b24124cfcdac80d21b42b718ac9b5fe17 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Tue, 7 Nov 2023 01:00:38 +0000 Subject: [PATCH 091/246] fix(backups): do not infinitely retry automatic backup if it errors out --- selfprivacy_api/backup/__init__.py | 38 +++++++++++++++++++++++++----- selfprivacy_api/backup/jobs.py | 10 ++++++++ tests/test_graphql/test_backup.py | 32 +++++++++++++++++++++++++ 3 files changed, 74 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index aa11f7f..a5fe066 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,7 +1,8 @@ """ This module contains the controller class for backups. 
""" -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone +import time import os from os import statvfs from typing import Callable, List, Optional @@ -37,6 +38,7 @@ from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.backup.storage import Storage from selfprivacy_api.backup.jobs import ( get_backup_job, + get_backup_fail, add_backup_job, get_restore_job, add_restore_job, @@ -292,9 +294,9 @@ class Backups: def back_up( service: Service, reason: BackupReason = BackupReason.EXPLICIT ) -> Snapshot: - """The top-level function to back up a service""" - folders = service.get_folders() - service_name = service.get_id() + """The top-level function to back up a service + If it fails for any reason at all, it should both mark job as + errored and re-raise an error""" job = get_backup_job(service) if job is None: @@ -302,6 +304,10 @@ class Backups: Jobs.update(job, status=JobStatus.RUNNING) try: + if service.can_be_backed_up() is False: + raise ValueError("cannot backup a non-backuppable service") + folders = service.get_folders() + service_name = service.get_id() service.pre_backup() snapshot = Backups.provider().backupper.start_backup( folders, @@ -692,23 +698,43 @@ class Backups: """Get a timezone-aware time of the last backup of a service""" return Storage.get_last_backup_time(service.get_id()) + @staticmethod + def get_last_backup_error_time(service: Service) -> Optional[datetime]: + """Get a timezone-aware time of the last backup of a service""" + job = get_backup_fail(service) + if job is not None: + datetime_created = job.created_at + if datetime_created.tzinfo is None: + # assume it is in localtime + offset = timedelta(seconds=time.localtime().tm_gmtoff) + datetime_created = datetime_created - offset + return datetime.combine(datetime_created.date(), datetime_created.time(),timezone.utc) + return datetime_created + return None + @staticmethod def is_time_to_backup_service(service: Service, time: 
datetime): """Returns True if it is time to back up a service""" period = Backups.autobackup_period_minutes() - service_id = service.get_id() if not service.can_be_backed_up(): return False if period is None: return False - last_backup = Storage.get_last_backup_time(service_id) + last_error = Backups.get_last_backup_error_time(service) + + if last_error is not None: + if time < last_error + timedelta(seconds=AUTOBACKUP_JOB_EXPIRATION_SECONDS): + return False + + last_backup = Backups.get_last_backed_up(service) if last_backup is None: # queue a backup immediately if there are no previous backups return True if time > last_backup + timedelta(minutes=period): return True + return False # Helpers diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py index ab4eaca..0aacd86 100644 --- a/selfprivacy_api/backup/jobs.py +++ b/selfprivacy_api/backup/jobs.py @@ -80,9 +80,19 @@ def get_job_by_type(type_id: str) -> Optional[Job]: return job +def get_failed_job_by_type(type_id: str) -> Optional[Job]: + for job in Jobs.get_jobs(): + if job.type_id == type_id and job.status == JobStatus.ERROR: + return job + + def get_backup_job(service: Service) -> Optional[Job]: return get_job_by_type(backup_job_type(service)) +def get_backup_fail(service: Service) -> Optional[Job]: + return get_failed_job_by_type(backup_job_type(service)) + + def get_restore_job(service: Service) -> Optional[Job]: return get_job_by_type(restore_job_type(service)) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 1903fba..27a2879 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -14,6 +14,8 @@ import secrets import tempfile +from selfprivacy_api.utils.huey import huey + import selfprivacy_api.services as services from selfprivacy_api.services import Service, get_all_services from selfprivacy_api.services.service import ServiceStatus @@ -119,6 +121,10 @@ def dummy_service(tmpdir, backups, raw_dummy_service) -> 
Service: # register our service services.services.append(service) + # make sure we are in immediate mode because this thing is non pickleable to store on queue. + huey.immediate = True + assert huey.immediate is True + assert get_service_by_id(service.get_id()) is not None yield service @@ -996,6 +1002,32 @@ def test_autobackup_timing(backups, dummy_service): assert Backups.is_time_to_backup_service(dummy_service, future) +def test_backup_unbackuppable(backups, dummy_service): + dummy_service.set_backuppable(False) + assert dummy_service.can_be_backed_up() is False + with pytest.raises(ValueError): + Backups.back_up(dummy_service) + + +def test_failed_autoback_prevents_more_autobackup(backups, dummy_service): + backup_period = 13 # minutes + now = datetime.now(timezone.utc) + + Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup_service(dummy_service, now) + + # artificially making an errored out backup job + dummy_service.set_backuppable(False) + with pytest.raises(ValueError): + Backups.back_up(dummy_service) + dummy_service.set_backuppable(True) + + assert Backups.get_last_backed_up(dummy_service) is None + assert Backups.get_last_backup_error_time(dummy_service) is not None + + assert Backups.is_time_to_backup_service(dummy_service, now) is False + + # Storage def test_snapshots_caching(backups, dummy_service): Backups.back_up(dummy_service) From b545a400c3657848bb570aed96d047514dc8c133 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 10 Nov 2023 11:47:48 +0000 Subject: [PATCH 092/246] doc(jobs): document that we are tz-naive when storing jobs --- selfprivacy_api/jobs/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 05b5ab8..7310016 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -8,8 +8,8 @@ A job is a dictionary with the following keys: - name: name of the job - description: 
description of the job - status: status of the job - - created_at: date of creation of the job - - updated_at: date of last update of the job + - created_at: date of creation of the job, naive localtime + - updated_at: date of last update of the job, naive localtime - finished_at: date of finish of the job - error: error message if the job failed - result: result of the job From 73a847f28849a181a52353a0eb7346787370af8b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 10 Nov 2023 12:19:32 +0000 Subject: [PATCH 093/246] feature(time): timestamp parsers --- selfprivacy_api/utils/time.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 selfprivacy_api/utils/time.py diff --git a/selfprivacy_api/utils/time.py b/selfprivacy_api/utils/time.py new file mode 100644 index 0000000..5eb7e04 --- /dev/null +++ b/selfprivacy_api/utils/time.py @@ -0,0 +1,30 @@ +from datetime import datetime, timezone + + +def tzaware_parse_time(iso_timestamp: str) -> datetime: + """ + parse an iso8601 timestamp into timezone-aware datetime + assume utc if no timezone in stamp + example of timestamp: + 2023-11-10T12:07:47.868788+00:00 + + """ + dt = datetime.fromisoformat(iso_timestamp) + if dt.tzinfo is None: + dt = dt.astimezone(timezone.utc) + return dt + + +def tzaware_parse_time_strict(iso_timestamp: str) -> datetime: + """ + parse an iso8601 timestamp into timezone-aware datetime + raise an error if no timezone in stamp + example of timestamp: + 2023-11-10T12:07:47.868788+00:00 + + """ + dt = datetime.fromisoformat(iso_timestamp) + if dt.tzinfo is None: + raise ValueError("no timezone in timestamp", iso_timestamp) + dt = dt.astimezone(timezone.utc) + return dt From 4d893d56b24dfa9f0bb837d9faa846a9efa214a5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 10 Nov 2023 12:29:25 +0000 Subject: [PATCH 094/246] test(common): add forced utc times for tests --- tests/common.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/common.py 
b/tests/common.py index 97d0d7a..55b95a6 100644 --- a/tests/common.py +++ b/tests/common.py @@ -11,6 +11,10 @@ def five_minutes_into_future_naive(): return datetime.now() + timedelta(minutes=5) +def five_minutes_into_future_naive_utc(): + return datetime.utcnow() + timedelta(minutes=5) + + def five_minutes_into_future(): return datetime.now(timezone.utc) + timedelta(minutes=5) @@ -19,6 +23,10 @@ def five_minutes_into_past_naive(): return datetime.now() - timedelta(minutes=5) +def five_minutes_into_past_naive_utc(): + return datetime.utcnow() - timedelta(minutes=5) + + def five_minutes_into_past(): return datetime.now(timezone.utc) - timedelta(minutes=5) From e78bcca9f2f9978136f4e80e11cb854ac87bad4c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 10 Nov 2023 12:49:30 +0000 Subject: [PATCH 095/246] test(auth): forced utc in recovery tests --- tests/test_graphql/test_api_recovery.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 19f8a3d..593c50b 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -10,8 +10,8 @@ from tests.common import ( ) # Graphql API's output should be timezone-naive -from tests.common import five_minutes_into_future_naive as five_minutes_into_future -from tests.common import five_minutes_into_past_naive as five_minutes_into_past +from tests.common import five_minutes_into_future_naive_utc as five_minutes_into_future +from tests.common import five_minutes_into_past_naive_utc as five_minutes_into_past from tests.test_graphql.api_common import ( assert_empty, From 8453f62c746251c11a74aa4acd2e4517f35aa415 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 10 Nov 2023 13:05:38 +0000 Subject: [PATCH 096/246] refactor(time): more time functions --- selfprivacy_api/utils/time.py | 30 +++++++++++++++++++++++++----- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git 
a/selfprivacy_api/utils/time.py b/selfprivacy_api/utils/time.py index 5eb7e04..36871c3 100644 --- a/selfprivacy_api/utils/time.py +++ b/selfprivacy_api/utils/time.py @@ -1,6 +1,29 @@ from datetime import datetime, timezone +def ensure_tz_aware(dt: datetime) -> datetime: + """ + returns timezone-aware datetime + assumes utc on naive datetime input + """ + if dt.tzinfo is None: + dt = dt.astimezone(timezone.utc) + return dt + + +def ensure_tz_aware_strict(dt: datetime) -> datetime: + """ + returns timezone-aware datetime + raises error if input is a naive datetime + """ + if dt.tzinfo is None: + raise ValueError( + "no timezone in datetime (tz-aware datetime is required for this operation)", + dt, + ) + return dt + + def tzaware_parse_time(iso_timestamp: str) -> datetime: """ parse an iso8601 timestamp into timezone-aware datetime @@ -10,8 +33,7 @@ def tzaware_parse_time(iso_timestamp: str) -> datetime: """ dt = datetime.fromisoformat(iso_timestamp) - if dt.tzinfo is None: - dt = dt.astimezone(timezone.utc) + dt = ensure_tz_aware(dt) return dt @@ -24,7 +46,5 @@ def tzaware_parse_time_strict(iso_timestamp: str) -> datetime: """ dt = datetime.fromisoformat(iso_timestamp) - if dt.tzinfo is None: - raise ValueError("no timezone in timestamp", iso_timestamp) - dt = dt.astimezone(timezone.utc) + dt = ensure_tz_aware_strict(dt) return dt From 8badb9aaaf79fd20ff3311b936bfcf9a10d21766 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 10 Nov 2023 13:31:12 +0000 Subject: [PATCH 097/246] refactor(auth): tz_aware expiration comparison --- selfprivacy_api/actions/api_tokens.py | 6 ++++-- selfprivacy_api/utils/{time.py => timeutils.py} | 0 2 files changed, 4 insertions(+), 2 deletions(-) rename selfprivacy_api/utils/{time.py => timeutils.py} (100%) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 37b7631..3746c57 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -7,6 +7,7 @@ from 
typing import Optional from pydantic import BaseModel from mnemonic import Mnemonic +from selfprivacy_api.utils.timeutils import ensure_tz_aware from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( RedisTokensRepository, ) @@ -121,8 +122,9 @@ def get_new_api_recovery_key( ) -> str: """Get new recovery key""" if expiration_date is not None: - current_time = datetime.now().timestamp() - if expiration_date.timestamp() < current_time: + expiration_date = ensure_tz_aware(expiration_date) + current_time = datetime.now(timezone.utc) + if expiration_date < current_time: raise InvalidExpirationDate("Expiration date is in the past") if uses_left is not None: if uses_left <= 0: diff --git a/selfprivacy_api/utils/time.py b/selfprivacy_api/utils/timeutils.py similarity index 100% rename from selfprivacy_api/utils/time.py rename to selfprivacy_api/utils/timeutils.py From dd6f37a17d918e4ea92f3cc0959982c3d7e5c6ed Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 10 Nov 2023 17:10:01 +0000 Subject: [PATCH 098/246] feature(auth): tz_aware recovery --- selfprivacy_api/actions/api_tokens.py | 14 +++++++---- .../graphql/queries/api_queries.py | 2 +- tests/common.py | 7 +++--- tests/test_graphql/test_api_recovery.py | 24 +++++++++++++++---- tests/test_rest_endpoints/test_auth.py | 3 ++- 5 files changed, 36 insertions(+), 14 deletions(-) diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 3746c57..e93491f 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -7,7 +7,7 @@ from typing import Optional from pydantic import BaseModel from mnemonic import Mnemonic -from selfprivacy_api.utils.timeutils import ensure_tz_aware +from selfprivacy_api.utils.timeutils import ensure_tz_aware, ensure_tz_aware_strict from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( RedisTokensRepository, ) @@ -95,16 +95,22 @@ class RecoveryTokenStatus(BaseModel): def 
get_api_recovery_token_status() -> RecoveryTokenStatus: - """Get the recovery token status""" + """Get the recovery token status, timezone-aware""" token = TOKEN_REPO.get_recovery_key() if token is None: return RecoveryTokenStatus(exists=False, valid=False) is_valid = TOKEN_REPO.is_recovery_key_valid() + + # New tokens are tz-aware, but older ones might not be + expiry_date = token.expires_at + if expiry_date is not None: + expiry_date = ensure_tz_aware_strict(expiry_date) + return RecoveryTokenStatus( exists=True, valid=is_valid, - date=_naive(token.created_at), - expiration=_naive(token.expires_at), + date=ensure_tz_aware_strict(token.created_at), + expiration=expiry_date, uses_left=token.uses_left, ) diff --git a/selfprivacy_api/graphql/queries/api_queries.py b/selfprivacy_api/graphql/queries/api_queries.py index cf56231..7052ded 100644 --- a/selfprivacy_api/graphql/queries/api_queries.py +++ b/selfprivacy_api/graphql/queries/api_queries.py @@ -38,7 +38,7 @@ class ApiRecoveryKeyStatus: def get_recovery_key_status() -> ApiRecoveryKeyStatus: - """Get recovery key status""" + """Get recovery key status, times are timezone-aware""" status = get_api_recovery_token_status() if status is None or not status.exists: return ApiRecoveryKeyStatus( diff --git a/tests/common.py b/tests/common.py index 55b95a6..c327ae9 100644 --- a/tests/common.py +++ b/tests/common.py @@ -67,8 +67,7 @@ def mnemonic_to_hex(mnemonic): return Mnemonic(language="english").to_entropy(mnemonic).hex() -def assert_recovery_recent(time_generated): - assert ( - datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - timedelta(seconds=5) - < datetime.now() +def assert_recovery_recent(time_generated: str): + assert datetime.fromisoformat(time_generated) - timedelta(seconds=5) < datetime.now( + timezone.utc ) diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 593c50b..b0155e7 100644 --- a/tests/test_graphql/test_api_recovery.py +++ 
b/tests/test_graphql/test_api_recovery.py @@ -2,6 +2,10 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring +import pytest + +from datetime import datetime, timezone + from tests.common import ( generate_api_query, assert_recovery_recent, @@ -11,6 +15,7 @@ from tests.common import ( # Graphql API's output should be timezone-naive from tests.common import five_minutes_into_future_naive_utc as five_minutes_into_future +from tests.common import five_minutes_into_future as five_minutes_into_future_tz from tests.common import five_minutes_into_past_naive_utc as five_minutes_into_past from tests.test_graphql.api_common import ( @@ -158,17 +163,24 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): graphql_use_recovery_key(client, key, "new_test_token2") +@pytest.mark.parametrize( + "expiration_date", [five_minutes_into_future(), five_minutes_into_future_tz()] +) def test_graphql_generate_recovery_key_with_expiration_date( - client, authorized_client, tokens_file + client, authorized_client, tokens_file, expiration_date: datetime ): - expiration_date = five_minutes_into_future() key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date) status = graphql_recovery_status(authorized_client) assert status["exists"] is True assert status["valid"] is True assert_recovery_recent(status["creationDate"]) - assert status["expirationDate"] == expiration_date.isoformat() + + # timezone-aware comparison. 
Should pass regardless of server's tz + assert datetime.fromisoformat( + status["expirationDate"] + ) == expiration_date.astimezone(timezone.utc) + assert status["usesLeft"] is None graphql_use_recovery_key(client, key, "new_test_token") @@ -194,7 +206,11 @@ def test_graphql_use_recovery_key_after_expiration( assert status["exists"] is True assert status["valid"] is False assert_recovery_recent(status["creationDate"]) - assert status["expirationDate"] == expiration_date.isoformat() + + # timezone-aware comparison. Should pass regardless of server's tz + assert datetime.fromisoformat( + status["expirationDate"] + ) == expiration_date.astimezone(timezone.utc) assert status["usesLeft"] is None diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index d62fa18..8565143 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -2,6 +2,7 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring import datetime +from datetime import timezone import pytest from tests.conftest import TOKENS_FILE_CONTENTS @@ -337,7 +338,7 @@ def test_generate_recovery_token_with_expiration_date( "exists": True, "valid": True, "date": time_generated, - "expiration": expiration_date.isoformat(), + "expiration": expiration_date.astimezone(timezone.utc).isoformat(), "uses_left": None, } From 1bbb804919a76d827f621a70f7069e52de12a474 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 10 Nov 2023 17:40:52 +0000 Subject: [PATCH 099/246] test(auth): token tests clearer about timezone assumptions --- selfprivacy_api/models/tokens/new_device_key.py | 4 ++-- selfprivacy_api/models/tokens/recovery_key.py | 1 + tests/test_models.py | 15 +++++++++++---- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/models/tokens/new_device_key.py b/selfprivacy_api/models/tokens/new_device_key.py index 9fbd23b..241cbd3 100644 --- a/selfprivacy_api/models/tokens/new_device_key.py +++ 
b/selfprivacy_api/models/tokens/new_device_key.py @@ -22,7 +22,7 @@ class NewDeviceKey(BaseModel): def is_valid(self) -> bool: """ - Check if the recovery key is valid. + Check if key is valid. """ if is_past(self.expires_at): return False @@ -30,7 +30,7 @@ class NewDeviceKey(BaseModel): def as_mnemonic(self) -> str: """ - Get the recovery key as a mnemonic. + Get the key as a mnemonic. """ return Mnemonic(language="english").to_mnemonic(bytes.fromhex(self.key)) diff --git a/selfprivacy_api/models/tokens/recovery_key.py b/selfprivacy_api/models/tokens/recovery_key.py index 3b81398..3f52735 100644 --- a/selfprivacy_api/models/tokens/recovery_key.py +++ b/selfprivacy_api/models/tokens/recovery_key.py @@ -47,6 +47,7 @@ class RecoveryKey(BaseModel): ) -> "RecoveryKey": """ Factory to generate a random token. + If passed naive time as expiration, assumes utc """ creation_date = datetime.now(timezone.utc) if expiration is not None: diff --git a/tests/test_models.py b/tests/test_models.py index 2263e82..f01bb4f 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -1,18 +1,25 @@ import pytest -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey -def test_recovery_key_expired(): - expiration = datetime.now() - timedelta(minutes=5) +def test_recovery_key_expired_utcnaive(): + expiration = datetime.utcnow() - timedelta(minutes=5) + key = RecoveryKey.generate(expiration=expiration, uses_left=2) + assert not key.is_valid() + + +def test_recovery_key_expired_tzaware(): + expiration = datetime.now(timezone.utc) - timedelta(minutes=5) key = RecoveryKey.generate(expiration=expiration, uses_left=2) assert not key.is_valid() def test_new_device_key_expired(): - expiration = datetime.now() - timedelta(minutes=5) + # key is supposed to be tzaware + expiration = datetime.now(timezone.utc) - 
timedelta(minutes=5) key = NewDeviceKey.generate() key.expires_at = expiration assert not key.is_valid() From e414f3b8fd46be99b2dfe4b0fb750086cb73271a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 13 Nov 2023 09:15:12 -0700 Subject: [PATCH 100/246] fix(auth): fix timezone issues with recovery tokens --- selfprivacy_api/utils/timeutils.py | 4 +++- tests/common.py | 4 ++++ tests/test_graphql/api_common.py | 12 ++++++------ tests/test_graphql/test_api_recovery.py | 12 ++++++------ tests/test_rest_endpoints/test_auth.py | 6 +++--- 5 files changed, 22 insertions(+), 16 deletions(-) diff --git a/selfprivacy_api/utils/timeutils.py b/selfprivacy_api/utils/timeutils.py index 36871c3..b6494c6 100644 --- a/selfprivacy_api/utils/timeutils.py +++ b/selfprivacy_api/utils/timeutils.py @@ -7,7 +7,9 @@ def ensure_tz_aware(dt: datetime) -> datetime: assumes utc on naive datetime input """ if dt.tzinfo is None: - dt = dt.astimezone(timezone.utc) + # astimezone() is dangerous, it makes an implicit assumption that + # the time is localtime + dt = dt.replace(tzinfo=timezone.utc) return dt diff --git a/tests/common.py b/tests/common.py index c327ae9..5199899 100644 --- a/tests/common.py +++ b/tests/common.py @@ -36,6 +36,10 @@ class NearFuture(datetime): def now(cls, tz=None): return datetime.now(tz) + timedelta(minutes=13) + @classmethod + def utcnow(cls): + return datetime.utcnow() + timedelta(minutes=13) + def read_json(file_path): with open(file_path, "r", encoding="utf-8") as file: diff --git a/tests/test_graphql/api_common.py b/tests/test_graphql/api_common.py index bfac767..4e4aec2 100644 --- a/tests/test_graphql/api_common.py +++ b/tests/test_graphql/api_common.py @@ -6,16 +6,16 @@ ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] def assert_ok(response, request): data = assert_data(response) - data[request]["success"] is True - data[request]["message"] is not None - data[request]["code"] == 200 + assert data[request]["success"] is True + assert data[request]["message"] is 
not None + assert data[request]["code"] == 200 def assert_errorcode(response, request, code): data = assert_data(response) - data[request]["success"] is False - data[request]["message"] is not None - data[request]["code"] == code + assert data[request]["success"] is False + assert data[request]["message"] is not None + assert data[request]["code"] == code def assert_empty(response): diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index b0155e7..629bac0 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -177,9 +177,9 @@ def test_graphql_generate_recovery_key_with_expiration_date( assert_recovery_recent(status["creationDate"]) # timezone-aware comparison. Should pass regardless of server's tz - assert datetime.fromisoformat( - status["expirationDate"] - ) == expiration_date.astimezone(timezone.utc) + assert datetime.fromisoformat(status["expirationDate"]) == expiration_date.replace( + tzinfo=timezone.utc + ) assert status["usesLeft"] is None @@ -208,9 +208,9 @@ def test_graphql_use_recovery_key_after_expiration( assert_recovery_recent(status["creationDate"]) # timezone-aware comparison. 
Should pass regardless of server's tz - assert datetime.fromisoformat( - status["expirationDate"] - ) == expiration_date.astimezone(timezone.utc) + assert datetime.fromisoformat(status["expirationDate"]) == expiration_date.replace( + tzinfo=timezone.utc + ) assert status["usesLeft"] is None diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py index 8565143..4d0d2ed 100644 --- a/tests/test_rest_endpoints/test_auth.py +++ b/tests/test_rest_endpoints/test_auth.py @@ -12,8 +12,8 @@ from tests.common import ( NearFuture, assert_recovery_recent, ) -from tests.common import five_minutes_into_future_naive as five_minutes_into_future -from tests.common import five_minutes_into_past_naive as five_minutes_into_past +from tests.common import five_minutes_into_future_naive_utc as five_minutes_into_future +from tests.common import five_minutes_into_past_naive_utc as five_minutes_into_past DATE_FORMATS = [ "%Y-%m-%dT%H:%M:%S.%fZ", @@ -338,7 +338,7 @@ def test_generate_recovery_token_with_expiration_date( "exists": True, "valid": True, "date": time_generated, - "expiration": expiration_date.astimezone(timezone.utc).isoformat(), + "expiration": expiration_date.replace(tzinfo=timezone.utc).isoformat(), "uses_left": None, } From c3cec36ad4f331a5397681f414e655f775ed7a34 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Mon, 13 Nov 2023 19:36:12 +0300 Subject: [PATCH 101/246] style: formatting --- selfprivacy_api/backup/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index a5fe066..f575ac0 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -708,7 +708,9 @@ class Backups: # assume it is in localtime offset = timedelta(seconds=time.localtime().tm_gmtoff) datetime_created = datetime_created - offset - return datetime.combine(datetime_created.date(), datetime_created.time(),timezone.utc) + return 
datetime.combine( + datetime_created.date(), datetime_created.time(), timezone.utc + ) return datetime_created return None From 96bff873a9c586623a6196b21ad36b2b50c5e923 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 17 Nov 2023 14:32:31 +0000 Subject: [PATCH 102/246] test(backups): use backup fixture (weird, i surely used it before) --- .../graphql/mutations/backup_mutations.py | 2 +- tests/test_graphql/test_api_backup.py | 30 +++++++++++-------- 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index dcfebff..954d0d4 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -115,7 +115,7 @@ class BackupMutations: except Exception as e: return GenericBackupConfigReturn( success=False, - message=str(e), + message=type(e).__name__ + ":" + str(e), code=400, configuration=Backup().configuration(), ) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 225abf7..b44fd44 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -265,6 +265,10 @@ def api_init_without_key( def assert_ok(data): + if data["success"] is False: + # convenience for debugging, this should display error + # if empty, consider adding helpful messages + raise ValueError(data["code"], data["message"]) assert data["code"] == 200 assert data["success"] is True @@ -302,7 +306,7 @@ def test_snapshots_empty(authorized_client, dummy_service): assert snaps == [] -def test_start_backup(authorized_client, dummy_service): +def test_start_backup(authorized_client, dummy_service, backups): response = api_backup(authorized_client, dummy_service) data = get_data(response)["backup"]["startBackup"] assert data["success"] is True @@ -318,7 +322,7 @@ def test_start_backup(authorized_client, dummy_service): assert snap["service"]["id"] == 
"testservice" -def test_restore(authorized_client, dummy_service): +def test_restore(authorized_client, dummy_service, backups): api_backup(authorized_client, dummy_service) snap = api_snapshots(authorized_client)[0] assert snap["id"] is not None @@ -331,7 +335,7 @@ def test_restore(authorized_client, dummy_service): assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED -def test_reinit(authorized_client, dummy_service, tmpdir): +def test_reinit(authorized_client, dummy_service, tmpdir, backups): test_repo_path = path.join(tmpdir, "not_at_all_sus") response = api_init_without_key( authorized_client, "FILE", "", "", test_repo_path, "" @@ -353,7 +357,7 @@ def test_reinit(authorized_client, dummy_service, tmpdir): assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED -def test_remove(authorized_client, generic_userdata): +def test_remove(authorized_client, generic_userdata, backups): response = api_remove(authorized_client) data = get_data(response)["backup"]["removeRepository"] assert_ok(data) @@ -367,7 +371,7 @@ def test_remove(authorized_client, generic_userdata): assert configuration["isInitialized"] is False -def test_autobackup_quotas_nonzero(authorized_client): +def test_autobackup_quotas_nonzero(authorized_client, backups): quotas = _AutobackupQuotas( last=3, daily=2, @@ -383,7 +387,7 @@ def test_autobackup_quotas_nonzero(authorized_client): assert configuration["autobackupQuotas"] == quotas -def test_autobackup_period_nonzero(authorized_client): +def test_autobackup_period_nonzero(authorized_client, backups): new_period = 11 response = api_set_period(authorized_client, new_period) data = get_data(response)["backup"]["setAutobackupPeriod"] @@ -393,7 +397,7 @@ def test_autobackup_period_nonzero(authorized_client): assert configuration["autobackupPeriod"] == new_period -def test_autobackup_period_zero(authorized_client): +def test_autobackup_period_zero(authorized_client, backups): new_period = 0 # since it is none by default, we better first set 
it to something non-negative response = api_set_period(authorized_client, 11) @@ -406,7 +410,7 @@ def test_autobackup_period_zero(authorized_client): assert configuration["autobackupPeriod"] == None -def test_autobackup_period_none(authorized_client): +def test_autobackup_period_none(authorized_client, backups): # since it is none by default, we better first set it to something non-negative response = api_set_period(authorized_client, 11) # and now we nullify it @@ -418,7 +422,7 @@ def test_autobackup_period_none(authorized_client): assert configuration["autobackupPeriod"] == None -def test_autobackup_period_negative(authorized_client): +def test_autobackup_period_negative(authorized_client, backups): # since it is none by default, we better first set it to something non-negative response = api_set_period(authorized_client, 11) # and now we nullify it @@ -432,7 +436,7 @@ def test_autobackup_period_negative(authorized_client): # We cannot really check the effect at this level, we leave it to backend tests # But we still make it run in both empty and full scenarios and ask for snaps afterwards -def test_reload_snapshots_bare_bare_bare(authorized_client, dummy_service): +def test_reload_snapshots_bare_bare_bare(authorized_client, dummy_service, backups): api_remove(authorized_client) response = api_reload_snapshots(authorized_client) @@ -443,7 +447,7 @@ def test_reload_snapshots_bare_bare_bare(authorized_client, dummy_service): assert snaps == [] -def test_reload_snapshots(authorized_client, dummy_service): +def test_reload_snapshots(authorized_client, dummy_service, backups): response = api_backup(authorized_client, dummy_service) data = get_data(response)["backup"]["startBackup"] @@ -455,7 +459,7 @@ def test_reload_snapshots(authorized_client, dummy_service): assert len(snaps) == 1 -def test_forget_snapshot(authorized_client, dummy_service): +def test_forget_snapshot(authorized_client, dummy_service, backups): response = api_backup(authorized_client, dummy_service) 
data = get_data(response)["backup"]["startBackup"] @@ -470,7 +474,7 @@ def test_forget_snapshot(authorized_client, dummy_service): assert len(snaps) == 0 -def test_forget_nonexistent_snapshot(authorized_client, dummy_service): +def test_forget_nonexistent_snapshot(authorized_client, dummy_service, backups): snaps = api_snapshots(authorized_client) assert len(snaps) == 0 response = api_forget(authorized_client, "898798uekiodpjoiweoiwuoeirueor") From 615e962965649dc1091bc021f94ad5243041f59d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 17 Nov 2023 14:36:11 +0000 Subject: [PATCH 103/246] feature(backups): batched removals of snapshots --- selfprivacy_api/backup/__init__.py | 18 +++++++++++++++--- selfprivacy_api/backup/backuppers/__init__.py | 5 +++++ .../backup/backuppers/none_backupper.py | 5 ++++- .../backup/backuppers/restic_backupper.py | 18 +++++++++++------- 4 files changed, 35 insertions(+), 11 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index f575ac0..3b21a29 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -397,9 +397,8 @@ class Backups: # TODO: Can be optimized since there is forgetting of an array in one restic op # but most of the time this will be only one snap to forget. 
- for snap in auto_snaps: - if snap not in new_snaplist: - Backups.forget_snapshot(snap) + deletable_snaps = [snap for snap in auto_snaps if snap not in new_snaplist] + Backups.forget_snapshots(deletable_snaps) @staticmethod def _standardize_quotas(i: int) -> int: @@ -606,6 +605,19 @@ class Backups: return snap + @staticmethod + def forget_snapshots(snapshots: List[Snapshot]) -> None: + """ + Deletes a batch of snapshots from the repo and from cache + Optimized + """ + ids = [snapshot.id for snapshot in snapshots] + Backups.provider().backupper.forget_snapshots(ids) + + # less critical + for snapshot in snapshots: + Storage.delete_cached_snapshot(snapshot) + @staticmethod def forget_snapshot(snapshot: Snapshot) -> None: """Deletes a snapshot from the repo and from cache""" diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index 0067a41..46a719e 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -66,3 +66,8 @@ class AbstractBackupper(ABC): def forget_snapshot(self, snapshot_id) -> None: """Forget a snapshot""" raise NotImplementedError + + @abstractmethod + def forget_snapshots(self, snapshot_ids: List[str]) -> None: + """Maybe optimized deletion of a batch of snapshots, just cycling if unsupported""" + raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index 429d9ab..86e25a6 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -39,4 +39,7 @@ class NoneBackupper(AbstractBackupper): raise NotImplementedError def forget_snapshot(self, snapshot_id): - raise NotImplementedError + raise NotImplementedError("forget_snapshot") + + def forget_snapshots(self, snapshots): + raise NotImplementedError("forget_snapshots") diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py 
b/selfprivacy_api/backup/backuppers/restic_backupper.py index afa6295..fd653e6 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -86,6 +86,10 @@ class ResticBackupper(AbstractBackupper): return f"echo {LocalBackupSecret.get()}" def restic_command(self, *args, tags: Optional[List[str]] = None) -> List[str]: + """ + Construct a restic command against the currently configured repo + Can support [nested] arrays as arguments, will flatten them into the final command + """ if tags is None: tags = [] @@ -384,15 +388,15 @@ class ResticBackupper(AbstractBackupper): output, ) + def forget_snapshot(self, snapshot_id: str) -> None: + self.forget_snapshots([snapshot_id]) + @unlocked_repo - def forget_snapshot(self, snapshot_id) -> None: - """ - Either removes snapshot or marks it for deletion later, - depending on server settings - """ + def forget_snapshots(self, snapshot_ids: List[str]) -> None: + # in case the backupper program supports batching, otherwise implement it by cycling forget_command = self.restic_command( "forget", - snapshot_id, + [snapshot_ids], # TODO: prune should be done in a separate process "--prune", ) @@ -414,7 +418,7 @@ class ResticBackupper(AbstractBackupper): if "no matching ID found" in err: raise ValueError( - "trying to delete, but no such snapshot: ", snapshot_id + "trying to delete, but no such snapshot(s): ", snapshot_ids ) assert ( From 25378273eb05952cf4f4bf8675f3c8071547d459 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 17 Nov 2023 14:48:41 +0000 Subject: [PATCH 104/246] refactor(backups): also batch the forget_all_snapshots command --- selfprivacy_api/backup/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 3b21a29..623a2ab 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -629,8 +629,7 @@ class Backups:
"""deliberately erase all snapshots we made""" # there is no dedicated optimized command for this, # but maybe we can have a multi-erase - for snapshot in Backups.get_all_snapshots(): - Backups.forget_snapshot(snapshot) + Backups.forget_snapshots(Backups.get_all_snapshots()) @staticmethod def force_snapshot_cache_reload() -> None: From 5ac93c30ae4265afdaa0728d30fef6fe32755764 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 17 Nov 2023 15:22:21 +0000 Subject: [PATCH 105/246] refactor(backups): async quota setting --- selfprivacy_api/backup/tasks.py | 27 ++++++++++++++++++- .../graphql/mutations/backup_mutations.py | 17 ++++++++++-- 2 files changed, 41 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index bdf6c9f..f3de55a 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -3,13 +3,19 @@ The tasks module contains the worker tasks that are used to back up and restore """ from datetime import datetime, timezone -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy, BackupReason +from selfprivacy_api.graphql.common_types.backup import ( + RestoreStrategy, + BackupReason, + AutobackupQuotas, +) from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey from huey import crontab from selfprivacy_api.services.service import Service from selfprivacy_api.backup import Backups +from selfprivacy_api.jobs import Jobs, JobStatus, Job + SNAPSHOT_CACHE_TTL_HOURS = 6 @@ -36,6 +42,25 @@ def start_backup( return True +@huey.task() +def set_autobackup_quotas(quotas: AutobackupQuotas, job: Job) -> bool: + job = Jobs.add( + name="trimming autobackup snapshots", + type_id="backups.autobackup_trimming", + description="Pruning the excessive snapshots after the new autobackup quotas are set", + status=JobStatus.RUNNING, + ) + Jobs.update(job, JobStatus.RUNNING) + try: + Backups.set_autobackup_quotas(quotas) + except Exception as 
e: + Jobs.update(job, JobStatus.ERROR, error=type(e).__name__ + ":" + str(e)) + return False + + Jobs.update(job, JobStatus.FINISHED) + return True + + @huey.task() def restore_snapshot( snapshot: Snapshot, diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 954d0d4..41f37b5 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -1,6 +1,8 @@ import typing import strawberry +from selfprivacy_api.jobs import Jobs + from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, @@ -18,7 +20,11 @@ from selfprivacy_api.graphql.common_types.backup import ( from selfprivacy_api.backup import Backups from selfprivacy_api.services import get_service_by_id -from selfprivacy_api.backup.tasks import start_backup, restore_snapshot +from selfprivacy_api.backup.tasks import ( + start_backup, + restore_snapshot, + set_autobackup_quotas, +) from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job @@ -103,8 +109,15 @@ class BackupMutations: To disable autobackup use autobackup period setting, not this mutation. 
""" + job = Jobs.add( + name="trimming autobackup snapshots", + type_id="backups.autobackup_trimming", + description="Pruning the excessive snapshots after the new autobackup quotas are set", + ) + try: - Backups.set_autobackup_quotas(quotas) + # this is async and can fail with only a job to report the error + set_autobackup_quotas(quotas, job) return GenericBackupConfigReturn( success=True, message="", From f1a452009a13d8e1f5720ac03d5a452b3be21176 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 17 Nov 2023 15:39:21 +0000 Subject: [PATCH 106/246] refactor(backups): only async snapshot trimming --- selfprivacy_api/backup/__init__.py | 3 +++ selfprivacy_api/backup/tasks.py | 13 +++++-------- .../graphql/mutations/backup_mutations.py | 7 ++++--- tests/test_graphql/test_backup.py | 1 + 4 files changed, 13 insertions(+), 11 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 623a2ab..62fec61 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -425,7 +425,10 @@ class Backups: yearly=Backups._standardize_quotas(quotas.yearly), # type: ignore ) ) + # do not prune all autosnaps right away, this will be done by an async task + @staticmethod + def prune_all_autosnaps() -> None: for service in get_all_services(): Backups._prune_auto_snaps(service) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index f3de55a..202f9e8 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -43,16 +43,13 @@ def start_backup( @huey.task() -def set_autobackup_quotas(quotas: AutobackupQuotas, job: Job) -> bool: - job = Jobs.add( - name="trimming autobackup snapshots", - type_id="backups.autobackup_trimming", - description="Pruning the excessive snapshots after the new autobackup quotas are set", - status=JobStatus.RUNNING, - ) +def prune_autobackup_snapshots(quotas: AutobackupQuotas, job: Job) -> bool: + """ + Remove all autobackup snapshots 
that do not fit into quotas set + """ Jobs.update(job, JobStatus.RUNNING) try: - Backups.set_autobackup_quotas(quotas) + Backups.prune_all_autosnaps() except Exception as e: Jobs.update(job, JobStatus.ERROR, error=type(e).__name__ + ":" + str(e)) return False diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 41f37b5..4bb6331 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -23,7 +23,7 @@ from selfprivacy_api.services import get_service_by_id from selfprivacy_api.backup.tasks import ( start_backup, restore_snapshot, - set_autobackup_quotas, + prune_autobackup_snapshots, ) from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job @@ -116,8 +116,9 @@ class BackupMutations: ) try: - # this is async and can fail with only a job to report the error - set_autobackup_quotas(quotas, job) + Backups.set_autobackup_quotas(quotas) + # this task is async and can fail with only a job to report the error + prune_autobackup_snapshots(job) return GenericBackupConfigReturn( success=True, message="", diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 27a2879..c2ac0ba 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -651,6 +651,7 @@ def test_too_many_auto(backups, dummy_service): # Retroactivity quota.last = 1 Backups.set_autobackup_quotas(quota) + Backups.prune_all_autosnaps() snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 From 4339c00058f5933a910b748828a46841fdc9037b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 17 Nov 2023 15:53:57 +0000 Subject: [PATCH 107/246] test(backups): test the trimming task --- selfprivacy_api/backup/tasks.py | 2 +- tests/test_graphql/test_backup.py | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/backup/tasks.py 
b/selfprivacy_api/backup/tasks.py index 202f9e8..6fddd1e 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -43,7 +43,7 @@ def start_backup( @huey.task() -def prune_autobackup_snapshots(quotas: AutobackupQuotas, job: Job) -> bool: +def prune_autobackup_snapshots(job: Job) -> bool: """ Remove all autobackup snapshots that do not fit into quotas set """ diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index c2ac0ba..ede0e2b 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -44,6 +44,7 @@ from selfprivacy_api.backup.tasks import ( start_backup, restore_snapshot, reload_snapshot_cache, + prune_autobackup_snapshots ) from selfprivacy_api.backup.storage import Storage from selfprivacy_api.backup.jobs import get_backup_job @@ -651,7 +652,9 @@ def test_too_many_auto(backups, dummy_service): # Retroactivity quota.last = 1 Backups.set_autobackup_quotas(quota) - Backups.prune_all_autosnaps() + job = Jobs.add("trimming", "test.autobackup_trimming", "trimming the snaps!") + handle=prune_autobackup_snapshots(job) + handle(blocking=True) snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 From 993b58d52da211771e2bc42878d99af511fac2dc Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 20 Nov 2023 11:14:22 +0000 Subject: [PATCH 108/246] beauty(backup): remove stray comments and imports, format --- selfprivacy_api/backup/__init__.py | 9 +++--- selfprivacy_api/backup/tasks.py | 1 - .../graphql/mutations/backup_mutations.py | 2 +- tests/test_graphql/test_backup.py | 30 ++++++++----------- 4 files changed, 18 insertions(+), 24 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 62fec61..66a4eac 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -395,8 +395,6 @@ class Backups: auto_snaps = Backups._auto_snaps(service) new_snaplist = 
Backups._prune_snaps_with_quotas(auto_snaps) - # TODO: Can be optimized since there is forgetting of an array in one restic op - # but most of the time this will be only one snap to forget. deletable_snaps = [snap for snap in auto_snaps if snap not in new_snaplist] Backups.forget_snapshots(deletable_snaps) @@ -629,9 +627,10 @@ class Backups: @staticmethod def forget_all_snapshots(): - """deliberately erase all snapshots we made""" - # there is no dedicated optimized command for this, - # but maybe we can have a multi-erase + """ + Mark all snapshots we have made for deletion and make them inaccessible + (this is done by cloud, we only issue a command) + """ Backups.forget_snapshots(Backups.get_all_snapshots()) @staticmethod diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index 6fddd1e..f0422ca 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -6,7 +6,6 @@ from datetime import datetime, timezone from selfprivacy_api.graphql.common_types.backup import ( RestoreStrategy, BackupReason, - AutobackupQuotas, ) from selfprivacy_api.models.backup.snapshot import Snapshot diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index 4bb6331..cc1538e 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -110,7 +110,7 @@ class BackupMutations: """ job = Jobs.add( - name="trimming autobackup snapshots", + name="Trimming autobackup snapshots", type_id="backups.autobackup_trimming", description="Pruning the excessive snapshots after the new autobackup quotas are set", ) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index ede0e2b..b66a90d 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -1,53 +1,49 @@ import pytest + import os import os.path as path from os import makedirs from os import remove 
from os import listdir from os import urandom -from datetime import datetime, timedelta, timezone, date, time -from subprocess import Popen + +from datetime import datetime, timedelta, timezone from copy import copy - -import secrets - - import tempfile from selfprivacy_api.utils.huey import huey import selfprivacy_api.services as services from selfprivacy_api.services import Service, get_all_services -from selfprivacy_api.services.service import ServiceStatus - from selfprivacy_api.services import get_service_by_id +from selfprivacy_api.services.service import ServiceStatus from selfprivacy_api.services.test_service import DummyService + from selfprivacy_api.graphql.queries.providers import BackupProvider -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy, BackupReason +from selfprivacy_api.graphql.common_types.backup import ( + RestoreStrategy, + BackupReason, + AutobackupQuotas, +) + from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.models.backup.snapshot import Snapshot -from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas - from selfprivacy_api.backup import Backups, BACKUP_PROVIDER_ENVS import selfprivacy_api.backup.providers as providers from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze from selfprivacy_api.backup.providers.none import NoBackups from selfprivacy_api.backup.util import sync -from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper -from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job - from selfprivacy_api.backup.tasks import ( start_backup, restore_snapshot, reload_snapshot_cache, - prune_autobackup_snapshots + prune_autobackup_snapshots, ) from selfprivacy_api.backup.storage import Storage -from selfprivacy_api.backup.jobs import get_backup_job TESTFILE_BODY = "testytest!" 
@@ -653,7 +649,7 @@ def test_too_many_auto(backups, dummy_service): quota.last = 1 Backups.set_autobackup_quotas(quota) job = Jobs.add("trimming", "test.autobackup_trimming", "trimming the snaps!") - handle=prune_autobackup_snapshots(job) + handle = prune_autobackup_snapshots(job) handle(blocking=True) snaps = Backups.get_snapshots(dummy_service) assert len(snaps) == 1 From 6d09c7aa9b8d1db5a6e5ccbad7d8665c363b08a4 Mon Sep 17 00:00:00 2001 From: Inex Code Date: Fri, 24 Nov 2023 13:57:52 +0300 Subject: [PATCH 109/246] feat: Provide display names in DNS records --- selfprivacy_api/graphql/common_types/dns.py | 1 + .../graphql/common_types/service.py | 1 + selfprivacy_api/graphql/queries/system.py | 1 + selfprivacy_api/services/__init__.py | 2 ++ .../services/bitwarden/__init__.py | 2 ++ selfprivacy_api/services/gitea/__init__.py | 2 ++ selfprivacy_api/services/jitsi/__init__.py | 2 ++ .../services/mailserver/__init__.py | 22 ++++++++++++++++--- .../services/nextcloud/__init__.py | 2 ++ selfprivacy_api/services/ocserv/__init__.py | 2 ++ selfprivacy_api/services/pleroma/__init__.py | 2 ++ selfprivacy_api/services/service.py | 1 + .../services/test_service/__init__.py | 2 ++ 13 files changed, 39 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/graphql/common_types/dns.py b/selfprivacy_api/graphql/common_types/dns.py index c9f8413..1c79036 100644 --- a/selfprivacy_api/graphql/common_types/dns.py +++ b/selfprivacy_api/graphql/common_types/dns.py @@ -11,3 +11,4 @@ class DnsRecord: content: str ttl: int priority: typing.Optional[int] + display_name: str diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 319ce3e..56e12b1 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -139,6 +139,7 @@ def service_to_graphql_service(service: ServiceInterface) -> Service: content=record.content, ttl=record.ttl, priority=record.priority, + 
display_name=record.display_name, ) for record in service.get_dns_records() ], diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index cc30fd7..82c9260 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -33,6 +33,7 @@ class SystemDomainInfo: content=record.content, ttl=record.ttl, priority=record.priority, + display_name=record.display_name, ) for record in get_all_required_dns_records() ] diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py index 02bb1d3..50ef76a 100644 --- a/selfprivacy_api/services/__init__.py +++ b/selfprivacy_api/services/__init__.py @@ -54,12 +54,14 @@ def get_all_required_dns_records() -> list[ServiceDnsRecord]: name="api", content=ip4, ttl=3600, + display_name="SelfPrivacy API", ), ServiceDnsRecord( type="AAAA", name="api", content=ip6, ttl=3600, + display_name="SelfPrivacy API (IPv6)", ), ] for service in get_enabled_services(): diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 2f695fd..0d1dfdc 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -129,12 +129,14 @@ class Bitwarden(Service): name="password", content=network_utils.get_ip4(), ttl=3600, + display_name="Bitwarden", ), ServiceDnsRecord( type="AAAA", name="password", content=network_utils.get_ip6(), ttl=3600, + display_name="Bitwarden (IPv6)", ), ] diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index fcb9ca7..08f223e 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -123,12 +123,14 @@ class Gitea(Service): name="git", content=network_utils.get_ip4(), ttl=3600, + display_name="Gitea", ), ServiceDnsRecord( type="AAAA", name="git", content=network_utils.get_ip6(), ttl=3600, + display_name="Gitea (IPv6)", ), ] 
diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py index 2684fc3..fed6f33 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -132,12 +132,14 @@ class Jitsi(Service): name="meet", content=ip4, ttl=3600, + display_name="Jitsi", ), ServiceDnsRecord( type="AAAA", name="meet", content=ip6, ttl=3600, + display_name="Jitsi (IPv6)", ), ] diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index d0f70eb..e36a694 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -121,27 +121,43 @@ class MailServer(Service): name=domain, content=ip4, ttl=3600, + display_name="Root Domain", ), ServiceDnsRecord( type="AAAA", name=domain, content=ip6, ttl=3600, + display_name="Root Domain (IPv6)", ), ServiceDnsRecord( - type="MX", name=domain, content=domain, ttl=3600, priority=10 + type="MX", + name=domain, + content=domain, + ttl=3600, + priority=10, + display_name="Mail server record", ), ServiceDnsRecord( - type="TXT", name="_dmarc", content="v=DMARC1; p=none", ttl=18000 + type="TXT", + name="_dmarc", + content="v=DMARC1; p=none", + ttl=18000, + display_name="DMARC record", ), ServiceDnsRecord( type="TXT", name=domain, content=f"v=spf1 a mx ip4:{ip4} -all", ttl=18000, + display_name="SPF record", ), ServiceDnsRecord( - type="TXT", name="selector._domainkey", content=dkim_record, ttl=18000 + type="TXT", + name="selector._domainkey", + content=dkim_record, + ttl=18000, + display_name="DKIM key", ), ] diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 4ac01af..1703478 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -128,12 +128,14 @@ class Nextcloud(Service): name="cloud", content=network_utils.get_ip4(), ttl=3600, + 
display_name="Nextcloud", ), ServiceDnsRecord( type="AAAA", name="cloud", content=network_utils.get_ip6(), ttl=3600, + display_name="Nextcloud (IPv6)", ), ] diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index 98c6e97..d9d59a0 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -106,12 +106,14 @@ class Ocserv(Service): name="vpn", content=network_utils.get_ip4(), ttl=3600, + display_name="OpenConnect VPN", ), ServiceDnsRecord( type="AAAA", name="vpn", content=network_utils.get_ip6(), ttl=3600, + display_name="OpenConnect VPN (IPv6)", ), ] diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index d98b13f..b2540d8 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -127,12 +127,14 @@ class Pleroma(Service): name="social", content=network_utils.get_ip4(), ttl=3600, + display_name="Pleroma", ), ServiceDnsRecord( type="AAAA", name="social", content=network_utils.get_ip6(), ttl=3600, + display_name="Pleroma (IPv6)", ), ] diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index b66bd19..8446133 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -33,6 +33,7 @@ class ServiceDnsRecord(BaseModel): name: str content: str ttl: int + display_name: str priority: typing.Optional[int] = None diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 6ae33ef..187a1c6 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -186,12 +186,14 @@ class DummyService(Service): name="password", content=network_utils.get_ip4(), ttl=3600, + display_name="Test Service", ), ServiceDnsRecord( type="AAAA", name="password", content=network_utils.get_ip6(), ttl=3600, + 
display_name="Test Service (IPv6)", ), ] From cbd3cafe0a19d15f54e7915300b1142067d1c1bc Mon Sep 17 00:00:00 2001 From: Inex Code Date: Mon, 25 Dec 2023 10:42:18 +0300 Subject: [PATCH 110/246] chore: Bump version --- selfprivacy_api/dependencies.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index 1955601..05c9bdc 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.4.2" + return "2.4.3" diff --git a/setup.py b/setup.py index 399b157..93637ff 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.4.2", + version="2.4.3", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", From cda8d70bd946541e9b07868d052995052a459e7f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 14 Aug 2023 14:49:58 +0000 Subject: [PATCH 111/246] test(rest-dismantling): remove auth tests after confirming gql counterparts exist --- tests/test_rest_endpoints/test_auth.py | 457 ------------------------- 1 file changed, 457 deletions(-) delete mode 100644 tests/test_rest_endpoints/test_auth.py diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py deleted file mode 100644 index 4d0d2ed..0000000 --- a/tests/test_rest_endpoints/test_auth.py +++ /dev/null @@ -1,457 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=missing-function-docstring -import datetime -from datetime import timezone -import pytest - -from tests.conftest import TOKENS_FILE_CONTENTS -from tests.common import ( - RECOVERY_KEY_VALIDATION_DATETIME, - DEVICE_KEY_VALIDATION_DATETIME, - NearFuture, - assert_recovery_recent, -) -from tests.common import five_minutes_into_future_naive_utc as 
five_minutes_into_future -from tests.common import five_minutes_into_past_naive_utc as five_minutes_into_past - -DATE_FORMATS = [ - "%Y-%m-%dT%H:%M:%S.%fZ", - "%Y-%m-%dT%H:%M:%S.%f", - "%Y-%m-%d %H:%M:%S.%fZ", - "%Y-%m-%d %H:%M:%S.%f", -] - - -def assert_original(client): - new_tokens = rest_get_tokens_info(client) - - for token in TOKENS_FILE_CONTENTS["tokens"]: - assert_token_valid(client, token["token"]) - for new_token in new_tokens: - if new_token["name"] == token["name"]: - assert ( - datetime.datetime.fromisoformat(new_token["date"]) == token["date"] - ) - assert_no_recovery(client) - - -def assert_token_valid(client, token): - client.headers.update({"Authorization": "Bearer " + token}) - assert rest_get_tokens_info(client) is not None - - -def rest_get_tokens_info(client): - response = client.get("/auth/tokens") - assert response.status_code == 200 - return response.json() - - -def rest_try_authorize_new_device(client, token, device_name): - response = client.post( - "/auth/new_device/authorize", - json={ - "token": token, - "device": device_name, - }, - ) - return response - - -def rest_make_recovery_token(client, expires_at=None, timeformat=None, uses=None): - json = {} - - if expires_at is not None: - assert timeformat is not None - expires_at_str = expires_at.strftime(timeformat) - json["expiration"] = expires_at_str - - if uses is not None: - json["uses"] = uses - - if json == {}: - response = client.post("/auth/recovery_token") - else: - response = client.post( - "/auth/recovery_token", - json=json, - ) - - if not response.status_code == 200: - raise ValueError(response.reason, response.text, response.json()["detail"]) - assert response.status_code == 200 - assert "token" in response.json() - return response.json()["token"] - - -def rest_get_recovery_status(client): - response = client.get("/auth/recovery_token") - assert response.status_code == 200 - return response.json() - - -def rest_get_recovery_date(client): - status = 
rest_get_recovery_status(client) - assert "date" in status - return status["date"] - - -def assert_no_recovery(client): - assert not rest_get_recovery_status(client)["exists"] - - -def rest_recover_with_mnemonic(client, mnemonic_token, device_name): - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": device_name}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert_token_valid(client, new_token) - return new_token - - -# Tokens - - -def test_get_tokens_info(authorized_client, tokens_file): - assert sorted(rest_get_tokens_info(authorized_client), key=lambda x: x["name"]) == [ - {"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True}, - { - "name": "test_token2", - "date": "2022-01-14T08:31:10.789314", - "is_caller": False, - }, - ] - - -def test_get_tokens_unauthorized(client, tokens_file): - response = client.get("/auth/tokens") - assert response.status_code == 401 - - -def test_delete_token_unauthorized(client, authorized_client, tokens_file): - response = client.delete("/auth/tokens") - assert response.status_code == 401 - assert_original(authorized_client) - - -def test_delete_token(authorized_client, tokens_file): - response = authorized_client.delete( - "/auth/tokens", json={"token_name": "test_token2"} - ) - assert response.status_code == 200 - assert rest_get_tokens_info(authorized_client) == [ - {"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True} - ] - - -def test_delete_self_token(authorized_client, tokens_file): - response = authorized_client.delete( - "/auth/tokens", json={"token_name": "test_token"} - ) - assert response.status_code == 400 - assert_original(authorized_client) - - -def test_delete_nonexistent_token(authorized_client, tokens_file): - response = authorized_client.delete( - "/auth/tokens", json={"token_name": "test_token3"} - ) - assert response.status_code == 404 - 
assert_original(authorized_client) - - -def test_refresh_token_unauthorized(client, authorized_client, tokens_file): - response = client.post("/auth/tokens") - assert response.status_code == 401 - assert_original(authorized_client) - - -def test_refresh_token(authorized_client, tokens_file): - response = authorized_client.post("/auth/tokens") - assert response.status_code == 200 - new_token = response.json()["token"] - assert_token_valid(authorized_client, new_token) - - -# New device - - -def test_get_new_device_auth_token_unauthorized(client, authorized_client, tokens_file): - response = client.post("/auth/new_device") - assert response.status_code == 401 - assert "token" not in response.json() - assert "detail" in response.json() - # We only can check existence of a token we know. - - -def test_get_and_delete_new_device_token(client, authorized_client, tokens_file): - token = rest_get_new_device_token(authorized_client) - response = authorized_client.delete("/auth/new_device", json={"token": token}) - assert response.status_code == 200 - assert rest_try_authorize_new_device(client, token, "new_device").status_code == 404 - - -def test_delete_token_unauthenticated(client, authorized_client, tokens_file): - token = rest_get_new_device_token(authorized_client) - response = client.delete("/auth/new_device", json={"token": token}) - assert response.status_code == 401 - assert rest_try_authorize_new_device(client, token, "new_device").status_code == 200 - - -def rest_get_new_device_token(client): - response = client.post("/auth/new_device") - assert response.status_code == 200 - assert "token" in response.json() - return response.json()["token"] - - -def test_get_and_authorize_new_device(client, authorized_client, tokens_file): - token = rest_get_new_device_token(authorized_client) - response = rest_try_authorize_new_device(client, token, "new_device") - assert response.status_code == 200 - assert_token_valid(authorized_client, response.json()["token"]) - - -def 
test_authorize_new_device_with_invalid_token( - client, authorized_client, tokens_file -): - response = rest_try_authorize_new_device(client, "invalid_token", "new_device") - assert response.status_code == 404 - assert_original(authorized_client) - - -def test_get_and_authorize_used_token(client, authorized_client, tokens_file): - token_to_be_used_2_times = rest_get_new_device_token(authorized_client) - response = rest_try_authorize_new_device( - client, token_to_be_used_2_times, "new_device" - ) - assert response.status_code == 200 - assert_token_valid(authorized_client, response.json()["token"]) - - response = rest_try_authorize_new_device( - client, token_to_be_used_2_times, "new_device" - ) - assert response.status_code == 404 - - -def test_get_and_authorize_token_after_12_minutes( - client, authorized_client, tokens_file, mocker -): - token = rest_get_new_device_token(authorized_client) - - # TARDIS sounds - mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, NearFuture) - - response = rest_try_authorize_new_device(client, token, "new_device") - assert response.status_code == 404 - assert_original(authorized_client) - - -def test_authorize_without_token(client, authorized_client, tokens_file): - response = client.post( - "/auth/new_device/authorize", - json={"device": "new_device"}, - ) - assert response.status_code == 422 - assert_original(authorized_client) - - -# Recovery tokens -# GET /auth/recovery_token returns token status -# - if token is valid, returns 200 and token status -# - token status: -# - exists (boolean) -# - valid (boolean) -# - date (string) -# - expiration (string) -# - uses_left (int) -# - if token is invalid, returns 400 and empty body -# POST /auth/recovery_token generates a new token -# has two optional parameters: -# - expiration (string in datetime format) -# - uses_left (int) -# POST /auth/recovery_token/use uses the token -# required arguments: -# - token (string) -# - device (string) -# - if token is valid, returns 200 and token 
-# - if token is invalid, returns 404 -# - if request is invalid, returns 400 - - -def test_get_recovery_token_status_unauthorized(client, authorized_client, tokens_file): - response = client.get("/auth/recovery_token") - assert response.status_code == 401 - assert_original(authorized_client) - - -def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 - assert response.json() == { - "exists": False, - "valid": False, - "date": None, - "expiration": None, - "uses_left": None, - } - assert_original(authorized_client) - - -def test_generate_recovery_token(authorized_client, client, tokens_file): - # Generate token without expiration and uses_left - mnemonic_token = rest_make_recovery_token(authorized_client) - - time_generated = rest_get_recovery_date(authorized_client) - assert_recovery_recent(time_generated) - - assert rest_get_recovery_status(authorized_client) == { - "exists": True, - "valid": True, - "date": time_generated, - "expiration": None, - "uses_left": None, - } - - rest_recover_with_mnemonic(client, mnemonic_token, "recover_device") - # And again - rest_recover_with_mnemonic(client, mnemonic_token, "recover_device2") - - -@pytest.mark.parametrize("timeformat", DATE_FORMATS) -def test_generate_recovery_token_with_expiration_date( - authorized_client, client, tokens_file, timeformat, mocker -): - # Generate token with expiration date - # Generate expiration date in the future - expiration_date = five_minutes_into_future() - mnemonic_token = rest_make_recovery_token( - authorized_client, expires_at=expiration_date, timeformat=timeformat - ) - - time_generated = rest_get_recovery_date(authorized_client) - assert_recovery_recent(time_generated) - - assert rest_get_recovery_status(authorized_client) == { - "exists": True, - "valid": True, - "date": time_generated, - "expiration": expiration_date.replace(tzinfo=timezone.utc).isoformat(), - 
"uses_left": None, - } - - rest_recover_with_mnemonic(client, mnemonic_token, "recover_device") - # And again - rest_recover_with_mnemonic(client, mnemonic_token, "recover_device2") - - # Try to use token after expiration date - mock = mocker.patch(RECOVERY_KEY_VALIDATION_DATETIME, NearFuture) - device_name = "recovery_device3" - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": device_name}, - ) - assert recovery_response.status_code == 404 - # Assert that the token was not created - assert device_name not in [ - token["name"] for token in rest_get_tokens_info(authorized_client) - ] - - -@pytest.mark.parametrize("timeformat", DATE_FORMATS) -def test_generate_recovery_token_with_expiration_in_the_past( - authorized_client, tokens_file, timeformat -): - # Server must return 400 if expiration date is in the past - expiration_date = five_minutes_into_past() - expiration_date_str = expiration_date.strftime(timeformat) - response = authorized_client.post( - "/auth/recovery_token", - json={"expiration": expiration_date_str}, - ) - assert response.status_code == 400 - assert_no_recovery(authorized_client) - - -def test_generate_recovery_token_with_invalid_time_format( - authorized_client, tokens_file -): - # Server must return 400 if expiration date is in the past - expiration_date = "invalid_time_format" - response = authorized_client.post( - "/auth/recovery_token", - json={"expiration": expiration_date}, - ) - assert response.status_code == 422 - assert_no_recovery(authorized_client) - - -def test_generate_recovery_token_with_limited_uses( - authorized_client, client, tokens_file -): - # Generate token with limited uses - mnemonic_token = rest_make_recovery_token(authorized_client, uses=2) - - time_generated = rest_get_recovery_date(authorized_client) - assert_recovery_recent(time_generated) - - assert rest_get_recovery_status(authorized_client) == { - "exists": True, - "valid": True, - "date": time_generated, - 
"expiration": None, - "uses_left": 2, - } - - # Try to use the token - rest_recover_with_mnemonic(client, mnemonic_token, "recover_device") - - assert rest_get_recovery_status(authorized_client) == { - "exists": True, - "valid": True, - "date": time_generated, - "expiration": None, - "uses_left": 1, - } - - # Try to use token again - rest_recover_with_mnemonic(client, mnemonic_token, "recover_device2") - - assert rest_get_recovery_status(authorized_client) == { - "exists": True, - "valid": False, - "date": time_generated, - "expiration": None, - "uses_left": 0, - } - - # Try to use token after limited uses - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device3"}, - ) - assert recovery_response.status_code == 404 - - -def test_generate_recovery_token_with_negative_uses( - authorized_client, client, tokens_file -): - # Generate token with limited uses - response = authorized_client.post( - "/auth/recovery_token", - json={"uses": -2}, - ) - assert response.status_code == 400 - assert_no_recovery(authorized_client) - - -def test_generate_recovery_token_with_zero_uses(authorized_client, client, tokens_file): - # Generate token with limited uses - response = authorized_client.post( - "/auth/recovery_token", - json={"uses": 0}, - ) - assert response.status_code == 400 - assert_no_recovery(authorized_client) From 7c8ea19608a7d9d15eb6a0c387c7ce7bb40a1e9a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 14 Aug 2023 15:45:41 +0000 Subject: [PATCH 112/246] test(rest-dismantling): remove system tests with gql counterparts --- tests/test_rest_endpoints/test_system.py | 249 ----------------------- 1 file changed, 249 deletions(-) diff --git a/tests/test_rest_endpoints/test_system.py b/tests/test_rest_endpoints/test_system.py index 90c1499..f2b20db 100644 --- a/tests/test_rest_endpoints/test_system.py +++ b/tests/test_rest_endpoints/test_system.py @@ -103,200 +103,6 @@ def 
mock_subprocess_check_output(mocker): return mock -def test_wrong_auth(wrong_auth_client): - response = wrong_auth_client.get("/system/pythonVersion") - assert response.status_code == 401 - - -def test_get_domain(authorized_client, domain_file): - assert get_domain() == "test-domain.tld" - - -## Timezones - - -def test_get_timezone_unauthorized(client, turned_on): - response = client.get("/system/configuration/timezone") - assert response.status_code == 401 - - -def test_get_timezone(authorized_client, turned_on): - response = authorized_client.get("/system/configuration/timezone") - assert response.status_code == 200 - assert response.json() == "Europe/Moscow" - - -def test_get_timezone_on_undefined(authorized_client, undefined_config): - response = authorized_client.get("/system/configuration/timezone") - assert response.status_code == 200 - assert response.json() == "Europe/Uzhgorod" - - -def test_put_timezone_unauthorized(client, turned_on): - response = client.put( - "/system/configuration/timezone", json={"timezone": "Europe/Moscow"} - ) - assert response.status_code == 401 - - -def test_put_timezone(authorized_client, turned_on): - response = authorized_client.put( - "/system/configuration/timezone", json={"timezone": "Europe/Helsinki"} - ) - assert response.status_code == 200 - assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Helsinki" - - -def test_put_timezone_on_undefined(authorized_client, undefined_config): - response = authorized_client.put( - "/system/configuration/timezone", json={"timezone": "Europe/Helsinki"} - ) - assert response.status_code == 200 - assert ( - read_json(undefined_config / "undefined.json")["timezone"] == "Europe/Helsinki" - ) - - -def test_put_timezone_without_timezone(authorized_client, turned_on): - response = authorized_client.put("/system/configuration/timezone", json={}) - assert response.status_code == 422 - assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" - - -def 
test_put_invalid_timezone(authorized_client, turned_on): - response = authorized_client.put( - "/system/configuration/timezone", json={"timezone": "Invalid/Timezone"} - ) - assert response.status_code == 400 - assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" - - -## AutoUpgrade - - -def test_get_auto_upgrade_unauthorized(client, turned_on): - response = client.get("/system/configuration/autoUpgrade") - assert response.status_code == 401 - - -def test_get_auto_upgrade(authorized_client, turned_on): - response = authorized_client.get("/system/configuration/autoUpgrade") - assert response.status_code == 200 - assert response.json() == { - "enable": True, - "allowReboot": True, - } - - -def test_get_auto_upgrade_on_undefined(authorized_client, undefined_config): - response = authorized_client.get("/system/configuration/autoUpgrade") - assert response.status_code == 200 - assert response.json() == { - "enable": True, - "allowReboot": False, - } - - -def test_get_auto_upgrade_without_values(authorized_client, no_values): - response = authorized_client.get("/system/configuration/autoUpgrade") - assert response.status_code == 200 - assert response.json() == { - "enable": True, - "allowReboot": False, - } - - -def test_get_auto_upgrade_turned_off(authorized_client, turned_off): - response = authorized_client.get("/system/configuration/autoUpgrade") - assert response.status_code == 200 - assert response.json() == { - "enable": False, - "allowReboot": False, - } - - -def test_put_auto_upgrade_unauthorized(client, turned_on): - response = client.put( - "/system/configuration/autoUpgrade", json={"enable": True, "allowReboot": True} - ) - assert response.status_code == 401 - - -def test_put_auto_upgrade(authorized_client, turned_on): - response = authorized_client.put( - "/system/configuration/autoUpgrade", json={"enable": False, "allowReboot": True} - ) - assert response.status_code == 200 - assert read_json(turned_on / 
"turned_on.json")["autoUpgrade"] == { - "enable": False, - "allowReboot": True, - } - - -def test_put_auto_upgrade_on_undefined(authorized_client, undefined_config): - response = authorized_client.put( - "/system/configuration/autoUpgrade", json={"enable": False, "allowReboot": True} - ) - assert response.status_code == 200 - assert read_json(undefined_config / "undefined.json")["autoUpgrade"] == { - "enable": False, - "allowReboot": True, - } - - -def test_put_auto_upgrade_without_values(authorized_client, no_values): - response = authorized_client.put( - "/system/configuration/autoUpgrade", json={"enable": True, "allowReboot": True} - ) - assert response.status_code == 200 - assert read_json(no_values / "no_values.json")["autoUpgrade"] == { - "enable": True, - "allowReboot": True, - } - - -def test_put_auto_upgrade_turned_off(authorized_client, turned_off): - response = authorized_client.put( - "/system/configuration/autoUpgrade", json={"enable": True, "allowReboot": True} - ) - assert response.status_code == 200 - assert read_json(turned_off / "turned_off.json")["autoUpgrade"] == { - "enable": True, - "allowReboot": True, - } - - -def test_put_auto_upgrade_without_enable(authorized_client, turned_off): - response = authorized_client.put( - "/system/configuration/autoUpgrade", json={"allowReboot": True} - ) - assert response.status_code == 200 - assert read_json(turned_off / "turned_off.json")["autoUpgrade"] == { - "enable": False, - "allowReboot": True, - } - - -def test_put_auto_upgrade_without_allow_reboot(authorized_client, turned_off): - response = authorized_client.put( - "/system/configuration/autoUpgrade", json={"enable": True} - ) - assert response.status_code == 200 - assert read_json(turned_off / "turned_off.json")["autoUpgrade"] == { - "enable": True, - "allowReboot": False, - } - - -def test_put_auto_upgrade_with_empty_json(authorized_client, turned_off): - response = authorized_client.put("/system/configuration/autoUpgrade", json={}) - assert 
response.status_code == 200 - assert read_json(turned_off / "turned_off.json")["autoUpgrade"] == { - "enable": False, - "allowReboot": False, - } - - def test_system_rebuild_unauthorized(client, mock_subprocess_popen): response = client.get("/system/configuration/apply") assert response.status_code == 401 @@ -348,20 +154,6 @@ def test_system_rollback(authorized_client, mock_subprocess_popen): ] -def test_get_system_version_unauthorized(client, mock_subprocess_check_output): - response = client.get("/system/version") - assert response.status_code == 401 - assert mock_subprocess_check_output.call_count == 0 - - -def test_get_system_version(authorized_client, mock_subprocess_check_output): - response = authorized_client.get("/system/version") - assert response.status_code == 200 - assert response.json() == {"system_version": "Testing Linux"} - assert mock_subprocess_check_output.call_count == 1 - assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"] - - def test_reboot_system_unauthorized(client, mock_subprocess_popen): response = client.get("/system/reboot") assert response.status_code == 401 @@ -373,44 +165,3 @@ def test_reboot_system(authorized_client, mock_subprocess_popen): assert response.status_code == 200 assert mock_subprocess_popen.call_count == 1 assert mock_subprocess_popen.call_args[0][0] == ["reboot"] - - -def test_get_python_version_unauthorized(client, mock_subprocess_check_output): - response = client.get("/system/pythonVersion") - assert response.status_code == 401 - assert mock_subprocess_check_output.call_count == 0 - - -def test_get_python_version(authorized_client, mock_subprocess_check_output): - response = authorized_client.get("/system/pythonVersion") - assert response.status_code == 200 - assert response.json() == "Testing Linux" - assert mock_subprocess_check_output.call_count == 1 - assert mock_subprocess_check_output.call_args[0][0] == ["python", "-V"] - - -def test_pull_system_unauthorized(client, mock_subprocess_popen): 
- response = client.get("/system/configuration/pull") - assert response.status_code == 401 - assert mock_subprocess_popen.call_count == 0 - - -def test_pull_system(authorized_client, mock_subprocess_popen, mock_os_chdir): - current_dir = os.getcwd() - response = authorized_client.get("/system/configuration/pull") - assert response.status_code == 200 - assert mock_subprocess_popen.call_count == 1 - assert mock_subprocess_popen.call_args[0][0] == ["git", "pull"] - assert mock_os_chdir.call_count == 2 - assert mock_os_chdir.call_args_list[0][0][0] == "/etc/nixos" - assert mock_os_chdir.call_args_list[1][0][0] == current_dir - - -def test_pull_system_broken_repo(authorized_client, mock_broken_service, mock_os_chdir): - current_dir = os.getcwd() - response = authorized_client.get("/system/configuration/pull") - assert response.status_code == 500 - assert mock_broken_service.call_count == 1 - assert mock_os_chdir.call_count == 2 - assert mock_os_chdir.call_args_list[0][0][0] == "/etc/nixos" - assert mock_os_chdir.call_args_list[1][0][0] == current_dir From d34b98e27bbdab1dc59902f1c11eadfea5717d6b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 14 Aug 2023 16:42:26 +0000 Subject: [PATCH 113/246] test(rest-dismantling): remove user tests with gql counterparts --- tests/test_rest_endpoints/test_users.py | 120 +----------------------- 1 file changed, 5 insertions(+), 115 deletions(-) diff --git a/tests/test_rest_endpoints/test_users.py b/tests/test_rest_endpoints/test_users.py index ebb3eff..c7c5f5b 100644 --- a/tests/test_rest_endpoints/test_users.py +++ b/tests/test_rest_endpoints/test_users.py @@ -113,41 +113,6 @@ def mock_subprocess_popen(mocker): ## TESTS ###################################################### -def test_get_users_unauthorized(client, some_users, mock_subprocess_popen): - response = client.get("/users") - assert response.status_code == 401 - - -def test_get_some_users(authorized_client, some_users, mock_subprocess_popen): - response = 
authorized_client.get("/users") - assert response.status_code == 200 - assert response.json() == ["user1", "user2", "user3"] - - -def test_get_one_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.get("/users") - assert response.status_code == 200 - assert response.json() == ["user1"] - - -def test_get_one_user_with_main(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.get("/users?withMainUser=true") - assert response.status_code == 200 - assert response.json().sort() == ["tester", "user1"].sort() - - -def test_get_no_users(authorized_client, no_users, mock_subprocess_popen): - response = authorized_client.get("/users") - assert response.status_code == 200 - assert response.json() == [] - - -def test_get_no_users_with_main(authorized_client, no_users, mock_subprocess_popen): - response = authorized_client.get("/users?withMainUser=true") - assert response.status_code == 200 - assert response.json() == ["tester"] - - def test_get_undefined_users( authorized_client, undefined_settings, mock_subprocess_popen ): @@ -156,28 +121,7 @@ def test_get_undefined_users( assert response.json() == [] -def test_post_users_unauthorized(client, some_users, mock_subprocess_popen): - response = client.post("/users") - assert response.status_code == 401 - - -def test_post_one_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/users", json={"username": "user4", "password": "password"} - ) - assert response.status_code == 201 - assert read_json(one_user / "one_user.json")["users"] == [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": ["ssh-rsa KEY user1@pc"], - }, - { - "username": "user4", - "sshKeys": [], - "hashedPassword": "NEW_HASHED", - }, - ] +# graphql tests still provide these fields even if with empty values def test_post_without_username(authorized_client, one_user, mock_subprocess_popen): @@ -197,47 +141,10 @@ def 
test_post_without_username_and_password( assert response.status_code == 422 -@pytest.mark.parametrize("username", invalid_usernames) -def test_post_system_user(authorized_client, one_user, mock_subprocess_popen, username): - response = authorized_client.post( - "/users", json={"username": username, "password": "password"} - ) - assert response.status_code == 409 - - -def test_post_existing_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/users", json={"username": "user1", "password": "password"} - ) - assert response.status_code == 409 - - -def test_post_existing_main_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/users", json={"username": "tester", "password": "password"} - ) - assert response.status_code == 409 - - -def test_post_user_to_undefined_users( - authorized_client, undefined_settings, mock_subprocess_popen -): - response = authorized_client.post( - "/users", json={"username": "user4", "password": "password"} - ) - assert response.status_code == 201 - assert read_json(undefined_settings / "undefined.json")["users"] == [ - {"username": "user4", "sshKeys": [], "hashedPassword": "NEW_HASHED"} - ] - - -def test_post_very_long_username(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/users", json={"username": "a" * 32, "password": "password"} - ) - assert response.status_code == 400 - +# end of BUT THERE ARE FIELDS! rant +# the final user is not in gql checks +# I think maybe generate a bunch? 
@pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "№:%##$^&@$&^()_"]) def test_post_invalid_username( authorized_client, one_user, mock_subprocess_popen, username @@ -248,16 +155,7 @@ def test_post_invalid_username( assert response.status_code == 400 -def test_delete_user_unauthorized(client, some_users, mock_subprocess_popen): - response = client.delete("/users/user1") - assert response.status_code == 401 - - -def test_delete_user_not_found(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.delete("/users/user4") - assert response.status_code == 404 - - +# gql counterpart is too weak def test_delete_user(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.delete("/users/user1") assert response.status_code == 200 @@ -267,14 +165,6 @@ def test_delete_user(authorized_client, some_users, mock_subprocess_popen): ] -@pytest.mark.parametrize("username", invalid_usernames) -def test_delete_system_user( - authorized_client, some_users, mock_subprocess_popen, username -): - response = authorized_client.delete("/users/" + username) - assert response.status_code == 400 or response.status_code == 404 - - def test_delete_main_user(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.delete("/users/tester") assert response.status_code == 400 From 011e052962f194f6a6dc09834f492f2c79d994a5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 14 Aug 2023 11:50:59 +0000 Subject: [PATCH 114/246] test(backups): more checks regarding tmpdirs and mounting --- tests/test_graphql/test_backup.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index b66a90d..882e086 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -13,6 +13,8 @@ import tempfile from selfprivacy_api.utils.huey import huey +import tempfile + import selfprivacy_api.services as services from 
selfprivacy_api.services import Service, get_all_services from selfprivacy_api.services import get_service_by_id From d4b2ca14bbd6ec69a13b7962ddb0da3e9635be70 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 1 Sep 2023 10:41:27 +0000 Subject: [PATCH 115/246] feature(backups): a task to autorefresh cache. Redis expiry abolished --- selfprivacy_api/backup/tasks.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index f0422ca..5b36252 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -16,6 +16,8 @@ from selfprivacy_api.backup import Backups from selfprivacy_api.jobs import Jobs, JobStatus, Job +SNAPSHOT_CACHE_TTL_HOURS = 6 + SNAPSHOT_CACHE_TTL_HOURS = 6 From 85c90105ea901a6f90207b90fa589ed29309200d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Sep 2023 13:17:57 +0000 Subject: [PATCH 116/246] test(backup): ensure we use correct repo folder --- selfprivacy_api/backup/backuppers/restic_backupper.py | 1 + tests/test_graphql/test_backup.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index fd653e6..b7ceb90 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -197,6 +197,7 @@ class ResticBackupper(AbstractBackupper): output, "parsed messages:", messages, + backup_command, ) from error @staticmethod diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 882e086..32b5d40 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -52,9 +52,11 @@ TESTFILE_BODY = "testytest!" TESTFILE_2_BODY = "testissimo!" 
REPO_NAME = "test_backup" +REPOFILE_NAME = "totallyunrelated" + def prepare_localfile_backups(temp_dir): - test_repo_path = path.join(temp_dir, "totallyunrelated") + test_repo_path = path.join(temp_dir, REPOFILE_NAME) assert not path.exists(test_repo_path) Backups.set_localfile_repo(test_repo_path) @@ -79,6 +81,7 @@ def backups(tmpdir): # assert not repo_path Backups.init_repo() + assert Backups.provider().location == str(tmpdir) + "/" + REPOFILE_NAME yield Backups.erase_repo() From 69a05de3d7d5c9060e6b7ba4ee01593c5e56abb4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Sep 2023 13:34:44 +0000 Subject: [PATCH 117/246] test(backup): ensure we actually call backup fixture and related resets --- tests/test_graphql/test_api_backup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index b44fd44..9543f22 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -1,5 +1,6 @@ from os import path -from tests.test_graphql.test_backup import dummy_service, backups, raw_dummy_service +from tests.test_graphql.test_backup import backups +from tests.test_graphql.test_backup import raw_dummy_service, dummy_service from tests.common import generate_backup_query @@ -301,7 +302,7 @@ def test_dummy_service_convertible_to_gql(dummy_service): assert gql_service is not None -def test_snapshots_empty(authorized_client, dummy_service): +def test_snapshots_empty(authorized_client, dummy_service, backups): snaps = api_snapshots(authorized_client) assert snaps == [] @@ -370,7 +371,6 @@ def test_remove(authorized_client, generic_userdata, backups): assert len(configuration["encryptionKey"]) > 1 assert configuration["isInitialized"] is False - def test_autobackup_quotas_nonzero(authorized_client, backups): quotas = _AutobackupQuotas( last=3, From 6e9d86e8447f43d6dc1f1d4772b02d2c4092325d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Sep 
2023 13:48:30 +0000 Subject: [PATCH 118/246] test(backup): break out dummy service fixture --- .../backup/backuppers/restic_backupper.py | 1 + tests/test_common.py | 53 +++++++++++++++++++ tests/test_graphql/test_backup.py | 50 +---------------- tests/test_services.py | 2 +- 4 files changed, 57 insertions(+), 49 deletions(-) diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index b7ceb90..0d74d9c 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -197,6 +197,7 @@ class ResticBackupper(AbstractBackupper): output, "parsed messages:", messages, + "command: ", backup_command, ) from error diff --git a/tests/test_common.py b/tests/test_common.py index e5d3f62..0bcd4bc 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -6,6 +6,59 @@ import pytest from selfprivacy_api.utils import WriteUserData, ReadUserData +from os import path +from os import makedirs +from typing import Generator + +# import pickle +import selfprivacy_api.services as services +from selfprivacy_api.services import get_service_by_id, Service +from selfprivacy_api.services.test_service import DummyService + + +TESTFILE_BODY = "testytest!" +TESTFILE_2_BODY = "testissimo!" 
+ + +@pytest.fixture() +def raw_dummy_service(tmpdir): + dirnames = ["test_service", "also_test_service"] + service_dirs = [] + for d in dirnames: + service_dir = path.join(tmpdir, d) + makedirs(service_dir) + service_dirs.append(service_dir) + + testfile_path_1 = path.join(service_dirs[0], "testfile.txt") + with open(testfile_path_1, "w") as file: + file.write(TESTFILE_BODY) + + testfile_path_2 = path.join(service_dirs[1], "testfile2.txt") + with open(testfile_path_2, "w") as file: + file.write(TESTFILE_2_BODY) + + # we need this to not change get_folders() much + class TestDummyService(DummyService, folders=service_dirs): + pass + + service = TestDummyService() + # assert pickle.dumps(service) is not None + return service + + +@pytest.fixture() +def dummy_service(tmpdir, raw_dummy_service) -> Generator[Service, None, None]: + service = raw_dummy_service + + # register our service + services.services.append(service) + + assert get_service_by_id(service.get_id()) is not None + yield service + + # cleanup because apparently it matters wrt tasks + services.services.remove(service) + def test_get_api_version(authorized_client): response = authorized_client.get("/api/version") diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 32b5d40..187ce11 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -2,7 +2,6 @@ import pytest import os import os.path as path -from os import makedirs from os import remove from os import listdir from os import urandom @@ -15,7 +14,8 @@ from selfprivacy_api.utils.huey import huey import tempfile -import selfprivacy_api.services as services +from tests.test_common import dummy_service, raw_dummy_service + from selfprivacy_api.services import Service, get_all_services from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import ServiceStatus @@ -48,8 +48,6 @@ from selfprivacy_api.backup.tasks import ( from 
selfprivacy_api.backup.storage import Storage -TESTFILE_BODY = "testytest!" -TESTFILE_2_BODY = "testissimo!" REPO_NAME = "test_backup" REPOFILE_NAME = "totallyunrelated" @@ -78,7 +76,6 @@ def backups(tmpdir): else: prepare_localfile_backups(tmpdir) Jobs.reset() - # assert not repo_path Backups.init_repo() assert Backups.provider().location == str(tmpdir) + "/" + REPOFILE_NAME @@ -91,49 +88,6 @@ def backups_backblaze(generic_userdata): Backups.reset(reset_json=False) -@pytest.fixture() -def raw_dummy_service(tmpdir): - dirnames = ["test_service", "also_test_service"] - service_dirs = [] - for d in dirnames: - service_dir = path.join(tmpdir, d) - makedirs(service_dir) - service_dirs.append(service_dir) - - testfile_path_1 = path.join(service_dirs[0], "testfile.txt") - with open(testfile_path_1, "w") as file: - file.write(TESTFILE_BODY) - - testfile_path_2 = path.join(service_dirs[1], "testfile2.txt") - with open(testfile_path_2, "w") as file: - file.write(TESTFILE_2_BODY) - - # we need this to not change get_folders() much - class TestDummyService(DummyService, folders=service_dirs): - pass - - service = TestDummyService() - return service - - -@pytest.fixture() -def dummy_service(tmpdir, backups, raw_dummy_service) -> Service: - service = raw_dummy_service - - # register our service - services.services.append(service) - - # make sure we are in immediate mode because this thing is non pickleable to store on queue. 
- huey.immediate = True - assert huey.immediate is True - - assert get_service_by_id(service.get_id()) is not None - yield service - - # cleanup because apparently it matters wrt tasks - services.services.remove(service) - - @pytest.fixture() def memory_backup() -> AbstractBackupProvider: ProviderClass = providers.get_provider(BackupProvider.MEMORY) diff --git a/tests/test_services.py b/tests/test_services.py index b83a7f2..3eef0cd 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -12,7 +12,7 @@ from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.services.service import Service, ServiceStatus, StoppedService from selfprivacy_api.utils.waitloop import wait_until_true -from tests.test_graphql.test_backup import raw_dummy_service +from tests.test_common import raw_dummy_service def test_unimplemented_folders_raises(): From 7ef751db9874225c6f18ec81ce8b15811d33e64e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Sep 2023 14:28:32 +0000 Subject: [PATCH 119/246] test(services): break out graphql basics --- tests/test_graphql/test_api_backup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 9543f22..bc4b7f1 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -371,6 +371,7 @@ def test_remove(authorized_client, generic_userdata, backups): assert len(configuration["encryptionKey"]) > 1 assert configuration["isInitialized"] is False + def test_autobackup_quotas_nonzero(authorized_client, backups): quotas = _AutobackupQuotas( last=3, From 87248c3f8c0c5a5f636c975cf7f326082e47d0fc Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Sep 2023 14:29:06 +0000 Subject: [PATCH 120/246] test(services): add services query generator --- tests/common.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/common.py b/tests/common.py index 5199899..8061721 100644 --- a/tests/common.py +++ 
b/tests/common.py @@ -67,6 +67,10 @@ def generate_backup_query(query_array): return "query TestBackup {\n backup {" + "\n".join(query_array) + "}\n}" +def generate_service_query(query_array): + return "query TestService {\n services {" + "\n".join(query_array) + "}\n}" + + def mnemonic_to_hex(mnemonic): return Mnemonic(language="english").to_entropy(mnemonic).hex() From 9bf239c3a8cd494c717af9fe39a8af51b309b841 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Sep 2023 14:30:18 +0000 Subject: [PATCH 121/246] test(services): disable usual services for testing --- tests/test_graphql/test_services.py | 106 ++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) create mode 100644 tests/test_graphql/test_services.py diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py new file mode 100644 index 0000000..00b4633 --- /dev/null +++ b/tests/test_graphql/test_services.py @@ -0,0 +1,106 @@ +import pytest + +from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations +import selfprivacy_api.services as service_module +from selfprivacy_api.services.service import Service + +import tests.test_graphql.test_api_backup +from tests.test_common import raw_dummy_service, dummy_service +from tests.common import generate_service_query +from tests.test_graphql.common import get_data + + +@pytest.fixture() +def only_dummy_service(dummy_service): + # because queries to services that are not really there error out + back_copy = service_module.services.copy() + service_module.services.clear() + service_module.services.append(dummy_service) + yield dummy_service + service_module.services.clear() + service_module.services.extend(back_copy) + + + +API_START_MUTATION = """ +mutation TestStartService($service_id: String!) { + services { + startService(serviceId: $service_id) { + success + message + code + service { + id + status + } + } + } +} +""" + +API_STOP_MUTATION = """ +mutation TestStopService($service_id: String!) 
{ + services { + stopService(serviceId: $service_id) { + success + message + code + service { + id + status + } + } + } +} + +""" +API_SERVICES_QUERY = """ +allServices { + id + status +} +""" + + +def api_start(client, service): + response = client.post( + "/graphql", + json={ + "query": API_START_MUTATION, + "variables": {"service_id": service.get_id()}, + }, + ) + return response + + +def api_stop(client, service): + response = client.post( + "/graphql", + json={ + "query": API_STOP_MUTATION, + "variables": {"service_id": service.get_id()}, + }, + ) + return response + + +def api_all_services(authorized_client): + response = authorized_client.post( + "/graphql", + json={"query": generate_service_query([API_SERVICES_QUERY])}, + ) + data = get_data(response) + result = data["services"]["allServices"] + assert result is not None + return result + + +def api_service(authorized_client, service: Service): + id = service.get_id() + for _service in api_all_services(authorized_client): + if _service["id"] == id: + return _service + + +def test_get_services(authorized_client, only_dummy_service): + assert len(api_all_services(authorized_client)) == 1 From 7808033bef47d782f610eb2f805607c11cb66ab5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 11:52:26 +0000 Subject: [PATCH 122/246] test(services): check id and status --- tests/test_graphql/test_services.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index 00b4633..93fe682 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -2,7 +2,7 @@ import pytest from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations import selfprivacy_api.services as service_module -from selfprivacy_api.services.service import Service +from selfprivacy_api.services.service import Service, ServiceStatus import tests.test_graphql.test_api_backup from 
tests.test_common import raw_dummy_service, dummy_service @@ -21,7 +21,6 @@ def only_dummy_service(dummy_service): service_module.services.extend(back_copy) - API_START_MUTATION = """ mutation TestStartService($service_id: String!) { services { @@ -103,4 +102,9 @@ def api_service(authorized_client, service: Service): def test_get_services(authorized_client, only_dummy_service): - assert len(api_all_services(authorized_client)) == 1 + services = api_all_services(authorized_client) + assert len(services) == 1 + + api_dummy_service = services[0] + assert api_dummy_service["id"] == "testservice" + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value From 728ea44823c42ee2d46cf344f8e56ab6323cb0fa Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 12:30:44 +0000 Subject: [PATCH 123/246] test(service): startstop --- tests/test_graphql/test_services.py | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index 93fe682..ea1f272 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -61,7 +61,7 @@ allServices { """ -def api_start(client, service): +def api_start(client, service: Service): response = client.post( "/graphql", json={ @@ -72,7 +72,7 @@ def api_start(client, service): return response -def api_stop(client, service): +def api_stop(client, service: Service): response = client.post( "/graphql", json={ @@ -108,3 +108,28 @@ def test_get_services(authorized_client, only_dummy_service): api_dummy_service = services[0] assert api_dummy_service["id"] == "testservice" assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + +def test_stop_start(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + # attempting to start an already 
started service + api_start(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + api_stop(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["status"] == ServiceStatus.INACTIVE.value + + # attempting to stop an already stopped service + api_stop(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["status"] == ServiceStatus.INACTIVE.value + + api_start(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value From 92612906efc05913417a84937b246311a4c23153 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 12:52:13 +0000 Subject: [PATCH 124/246] test(service): enabled status get --- .../services/test_service/__init__.py | 25 +++++++++++++++++-- tests/test_graphql/test_services.py | 2 ++ 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 187a1c6..3baf193 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -30,9 +30,10 @@ class DummyService(Service): def __init__(self): super().__init__() - status_file = self.status_file() - with open(status_file, "w") as file: + with open(self.status_file(), "w") as file: file.write(ServiceStatus.ACTIVE.value) + with open(self.enabled_file(), "w") as file: + file.write("True") @staticmethod def get_id() -> str: @@ -83,6 +84,26 @@ class DummyService(Service): # we do not REALLY want to store our state in our declared folders return path.join(dir, "..", "service_status") + @classmethod + def enabled_file(cls) -> str: + dir = cls.folders[0] + return path.join(dir, "..", 
"service_enabled") + + @classmethod + def get_enabled(cls) -> bool: + with open(cls.enabled_file(), "r") as file: + string = file.read().strip() + if "True" in string: + return True + if "False" in string: + return False + raise ValueError("test service enabled/disabled status file got corrupted") + + @classmethod + def set_enabled(cls, enabled: bool): + with open(cls.enabled_file(), "w") as file: + status_string = file.write(str(enabled)) + @classmethod def set_status(cls, status: ServiceStatus): with open(cls.status_file(), "w") as file: diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index ea1f272..efd86d8 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -57,6 +57,7 @@ API_SERVICES_QUERY = """ allServices { id status + isEnabled } """ @@ -108,6 +109,7 @@ def test_get_services(authorized_client, only_dummy_service): api_dummy_service = services[0] assert api_dummy_service["id"] == "testservice" assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + assert api_dummy_service["isEnabled"] is True def test_stop_start(authorized_client, only_dummy_service): From 47cfaad160b817ed2c3d92467e3c85938b2c6687 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 13:46:22 +0000 Subject: [PATCH 125/246] test(service): startstop return values --- tests/test_graphql/test_services.py | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index efd86d8..85090cf 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -7,7 +7,7 @@ from selfprivacy_api.services.service import Service, ServiceStatus import tests.test_graphql.test_api_backup from tests.test_common import raw_dummy_service, dummy_service from tests.common import generate_service_query -from tests.test_graphql.common import get_data +from tests.test_graphql.common 
import assert_ok, get_data @pytest.fixture() @@ -62,7 +62,7 @@ allServices { """ -def api_start(client, service: Service): +def api_start(client, service: Service) -> dict: response = client.post( "/graphql", json={ @@ -73,7 +73,7 @@ def api_start(client, service: Service): return response -def api_stop(client, service: Service): +def api_stop(client, service: Service) -> dict: response = client.post( "/graphql", json={ @@ -112,6 +112,26 @@ def test_get_services(authorized_client, only_dummy_service): assert api_dummy_service["isEnabled"] is True +def test_start_return_value(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_start(authorized_client, dummy_service) + data = get_data(mutation_response)["services"]["startService"] + assert_ok(data) + service = data["service"] + assert service["id"] == dummy_service.get_id() + assert service["status"] == ServiceStatus.ACTIVE.value + + +def test_stop_return_value(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_stop(authorized_client, dummy_service) + data = get_data(mutation_response)["services"]["stopService"] + assert_ok(data) + service = data["service"] + assert service["id"] == dummy_service.get_id() + assert service["status"] == ServiceStatus.INACTIVE.value + + def test_stop_start(authorized_client, only_dummy_service): dummy_service = only_dummy_service From bd43bdb33579581770c3f1b44b89606b7be385ca Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 14:03:01 +0000 Subject: [PATCH 126/246] test(service): breakout raw api calls with ids --- tests/test_graphql/test_services.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index 85090cf..c9b909b 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -63,22 +63,30 @@ allServices { def api_start(client, 
service: Service) -> dict: + return api_start_by_name(client, service.get_id()) + + +def api_start_by_name(client, service_id: str) -> dict: response = client.post( "/graphql", json={ "query": API_START_MUTATION, - "variables": {"service_id": service.get_id()}, + "variables": {"service_id": service_id}, }, ) return response def api_stop(client, service: Service) -> dict: + return api_stop_by_name(client, service.get_id()) + + +def api_stop_by_name(client, service_id: str) -> dict: response = client.post( "/graphql", json={ "query": API_STOP_MUTATION, - "variables": {"service_id": service.get_id()}, + "variables": {"service_id": service_id}, }, ) return response From b9f3aa49bd8a68406b6db8bc26f31fed6748e488 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 15:27:34 +0000 Subject: [PATCH 127/246] test(service): enable-disable return values --- .../services/test_service/__init__.py | 12 +-- tests/test_graphql/test_services.py | 81 +++++++++++++++++++ 2 files changed, 87 insertions(+), 6 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 3baf193..60aea3b 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -74,9 +74,9 @@ class DummyService(Service): def get_backup_description() -> str: return "How did we get here?" 
- @staticmethod - def is_enabled() -> bool: - return True + @classmethod + def is_enabled(cls) -> bool: + return cls.get_enabled() @classmethod def status_file(cls) -> str: @@ -144,11 +144,11 @@ class DummyService(Service): @classmethod def enable(cls): - pass + cls.set_enabled(True) @classmethod - def disable(cls, delay): - pass + def disable(cls): + cls.set_enabled(False) @classmethod def set_delay(cls, new_delay): diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index c9b909b..bc9eab0 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -37,6 +37,37 @@ mutation TestStartService($service_id: String!) { } """ +API_ENABLE_MUTATION = """ +mutation TestStartService($service_id: String!) { + services { + enableService(serviceId: $service_id) { + success + message + code + service { + id + isEnabled + } + } + } +} +""" +API_DISABLE_MUTATION = """ +mutation TestStartService($service_id: String!) { + services { + disableService(serviceId: $service_id) { + success + message + code + service { + id + isEnabled + } + } + } +} +""" + API_STOP_MUTATION = """ mutation TestStopService($service_id: String!) 
{ services { @@ -62,6 +93,36 @@ allServices { """ +def api_enable(client, service: Service) -> dict: + return api_enable_by_name(client, service.get_id()) + + +def api_enable_by_name(client, service_id: str) -> dict: + response = client.post( + "/graphql", + json={ + "query": API_ENABLE_MUTATION, + "variables": {"service_id": service_id}, + }, + ) + return response + + +def api_disable(client, service: Service) -> dict: + return api_disable_by_name(client, service.get_id()) + + +def api_disable_by_name(client, service_id: str) -> dict: + response = client.post( + "/graphql", + json={ + "query": API_DISABLE_MUTATION, + "variables": {"service_id": service_id}, + }, + ) + return response + + def api_start(client, service: Service) -> dict: return api_start_by_name(client, service.get_id()) @@ -120,6 +181,26 @@ def test_get_services(authorized_client, only_dummy_service): assert api_dummy_service["isEnabled"] is True +def test_enable_return_value(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_enable(authorized_client, dummy_service) + data = get_data(mutation_response)["services"]["enableService"] + assert_ok(data) + service = data["service"] + assert service["id"] == dummy_service.get_id() + assert service["isEnabled"] == True + + +def test_disable_return_value(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_disable(authorized_client, dummy_service) + data = get_data(mutation_response)["services"]["disableService"] + assert_ok(data) + service = data["service"] + assert service["id"] == dummy_service.get_id() + assert service["isEnabled"] == False + + def test_start_return_value(authorized_client, only_dummy_service): dummy_service = only_dummy_service mutation_response = api_start(authorized_client, dummy_service) From a1637181202104976f7418b09302ea45ad670566 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 15:50:40 +0000 Subject: [PATCH 
128/246] test(service): start nonexistent service --- tests/test_graphql/test_services.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index bc9eab0..712b11b 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -93,6 +93,16 @@ allServices { """ +def assert_notfound(data): + assert_errorcode(data, 404) + + +def assert_errorcode(data, errorcode): + assert data["code"] == errorcode + assert data["success"] is False + assert data["message"] is not None + + def api_enable(client, service: Service) -> dict: return api_enable_by_name(client, service.get_id()) @@ -221,6 +231,15 @@ def test_stop_return_value(authorized_client, only_dummy_service): assert service["status"] == ServiceStatus.INACTIVE.value +def test_start_nonexistent(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_start_by_name(authorized_client, "bogus_service") + data = get_data(mutation_response)["services"]["startService"] + assert_notfound(data) + + assert data["service"] is None + + def test_stop_start(authorized_client, only_dummy_service): dummy_service = only_dummy_service From bfdd98cb60bb9431fdaf77785c8c78f4e647a6a7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 15:55:30 +0000 Subject: [PATCH 129/246] test(service): stop nonexistent service --- tests/test_graphql/test_services.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index 712b11b..eb9f591 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -240,6 +240,15 @@ def test_start_nonexistent(authorized_client, only_dummy_service): assert data["service"] is None +def test_stop_nonexistent(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = 
api_stop_by_name(authorized_client, "bogus_service") + data = get_data(mutation_response)["services"]["stopService"] + assert_notfound(data) + + assert data["service"] is None + + def test_stop_start(authorized_client, only_dummy_service): dummy_service = only_dummy_service From 34782a3ca8cb680e519342f1fccbd6be8c9f21cf Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 16:02:09 +0000 Subject: [PATCH 130/246] test(service): enable nonexistent service --- tests/test_graphql/test_services.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index eb9f591..95d1693 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -249,6 +249,15 @@ def test_stop_nonexistent(authorized_client, only_dummy_service): assert data["service"] is None +def test_enable_nonexistent(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_enable_by_name(authorized_client, "bogus_service") + data = get_data(mutation_response)["services"]["enableService"] + assert_notfound(data) + + assert data["service"] is None + + def test_stop_start(authorized_client, only_dummy_service): dummy_service = only_dummy_service From 6d244fb603e591da54a9beabc1c2c6deefaa6820 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 16:08:28 +0000 Subject: [PATCH 131/246] test(service): disable nonexistent service --- tests/test_graphql/test_services.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index 95d1693..94e8c69 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -258,6 +258,15 @@ def test_enable_nonexistent(authorized_client, only_dummy_service): assert data["service"] is None +def test_disable_nonexistent(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + 
mutation_response = api_disable_by_name(authorized_client, "bogus_service") + data = get_data(mutation_response)["services"]["disableService"] + assert_notfound(data) + + assert data["service"] is None + + def test_stop_start(authorized_client, only_dummy_service): dummy_service = only_dummy_service From 83c639596c0089ded4ca0783120a803b132d9f3f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 16:20:39 +0000 Subject: [PATCH 132/246] test(service): start service unauthorized --- tests/test_graphql/test_services.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index 94e8c69..d6f5c4e 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -231,6 +231,14 @@ def test_stop_return_value(authorized_client, only_dummy_service): assert service["status"] == ServiceStatus.INACTIVE.value +def test_start_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_start(client, dummy_service) + + assert mutation_response.status_code == 200 + assert mutation_response.json().get("data") is None + + def test_start_nonexistent(authorized_client, only_dummy_service): dummy_service = only_dummy_service mutation_response = api_start_by_name(authorized_client, "bogus_service") From b06f1a4153fb4e9795bb604e07b49f9fa15e7279 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 16:30:41 +0000 Subject: [PATCH 133/246] test(service): other unauthorized mutations --- tests/test_graphql/test_services.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index d6f5c4e..4a170cf 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -239,6 +239,30 @@ def test_start_unauthorized(client, only_dummy_service): assert mutation_response.json().get("data") is None +def 
test_stop_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_stop(client, dummy_service) + + assert mutation_response.status_code == 200 + assert mutation_response.json().get("data") is None + + +def test_enable_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_enable(client, dummy_service) + + assert mutation_response.status_code == 200 + assert mutation_response.json().get("data") is None + + +def test_disable_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_disable(client, dummy_service) + + assert mutation_response.status_code == 200 + assert mutation_response.json().get("data") is None + + def test_start_nonexistent(authorized_client, only_dummy_service): dummy_service = only_dummy_service mutation_response = api_start_by_name(authorized_client, "bogus_service") From 018a8ce24886256996d7eccd606db747dd3d6910 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Sep 2023 16:50:16 +0000 Subject: [PATCH 134/246] test(service): an unauthorized query --- tests/test_graphql/test_services.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index 4a170cf..fcd2b85 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -164,16 +164,20 @@ def api_stop_by_name(client, service_id: str) -> dict: def api_all_services(authorized_client): - response = authorized_client.post( - "/graphql", - json={"query": generate_service_query([API_SERVICES_QUERY])}, - ) + response = api_all_services_raw(authorized_client) data = get_data(response) result = data["services"]["allServices"] assert result is not None return result +def api_all_services_raw(client): + return client.post( + "/graphql", + json={"query": generate_service_query([API_SERVICES_QUERY])}, + ) + + def 
api_service(authorized_client, service: Service): id = service.get_id() for _service in api_all_services(authorized_client): @@ -231,6 +235,14 @@ def test_stop_return_value(authorized_client, only_dummy_service): assert service["status"] == ServiceStatus.INACTIVE.value +def test_allservices_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + response = api_all_services_raw(client) + + assert response.status_code == 200 + assert response.json().get("data") is None + + def test_start_unauthorized(client, only_dummy_service): dummy_service = only_dummy_service mutation_response = api_start(client, dummy_service) From 1e77129f4fa0b0ceaea76ea3944a4a2d92a225d0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 6 Oct 2023 10:45:46 +0000 Subject: [PATCH 135/246] test(service): restart --- .../services/test_service/__init__.py | 6 +- tests/test_graphql/test_services.py | 63 ++++++++++++++++++- 2 files changed, 65 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 60aea3b..e4ed4cc 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -22,7 +22,7 @@ class DummyService(Service): """A test service""" folders: List[str] = [] - startstop_delay = 0 + startstop_delay = 0.0 backuppable = True def __init_subclass__(cls, folders: List[str]): @@ -151,8 +151,8 @@ class DummyService(Service): cls.set_enabled(False) @classmethod - def set_delay(cls, new_delay): - cls.startstop_delay = new_delay + def set_delay(cls, new_delay_sec: float) -> None: + cls.startstop_delay = new_delay_sec @classmethod def stop(cls): diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index fcd2b85..0b652c5 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -1,8 +1,10 @@ import pytest +from typing import Generator from 
selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations import selfprivacy_api.services as service_module from selfprivacy_api.services.service import Service, ServiceStatus +from selfprivacy_api.services.test_service import DummyService import tests.test_graphql.test_api_backup from tests.test_common import raw_dummy_service, dummy_service @@ -11,7 +13,7 @@ from tests.test_graphql.common import assert_ok, get_data @pytest.fixture() -def only_dummy_service(dummy_service): +def only_dummy_service(dummy_service) -> Generator[DummyService, None, None]: # because queries to services that are not really there error out back_copy = service_module.services.copy() service_module.services.clear() @@ -37,6 +39,22 @@ mutation TestStartService($service_id: String!) { } """ +API_RESTART_MUTATION = """ +mutation TestRestartService($service_id: String!) { + services { + restartService(serviceId: $service_id) { + success + message + code + service { + id + status + } + } + } +} +""" + API_ENABLE_MUTATION = """ mutation TestStartService($service_id: String!) 
{ services { @@ -148,6 +166,21 @@ def api_start_by_name(client, service_id: str) -> dict: return response +def api_restart(client, service: Service) -> dict: + return api_restart_by_name(client, service.get_id()) + + +def api_restart_by_name(client, service_id: str) -> dict: + response = client.post( + "/graphql", + json={ + "query": API_RESTART_MUTATION, + "variables": {"service_id": service_id}, + }, + ) + return response + + def api_stop(client, service: Service) -> dict: return api_stop_by_name(client, service.get_id()) @@ -225,6 +258,17 @@ def test_start_return_value(authorized_client, only_dummy_service): assert service["status"] == ServiceStatus.ACTIVE.value +def test_restart(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + dummy_service.set_delay(0.3) + mutation_response = api_restart(authorized_client, dummy_service) + data = get_data(mutation_response)["services"]["restartService"] + assert_ok(data) + service = data["service"] + assert service["id"] == dummy_service.get_id() + assert service["status"] == ServiceStatus.RELOADING.value + + def test_stop_return_value(authorized_client, only_dummy_service): dummy_service = only_dummy_service mutation_response = api_stop(authorized_client, dummy_service) @@ -251,6 +295,14 @@ def test_start_unauthorized(client, only_dummy_service): assert mutation_response.json().get("data") is None +def test_restart_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_restart(client, dummy_service) + + assert mutation_response.status_code == 200 + assert mutation_response.json().get("data") is None + + def test_stop_unauthorized(client, only_dummy_service): dummy_service = only_dummy_service mutation_response = api_stop(client, dummy_service) @@ -284,6 +336,15 @@ def test_start_nonexistent(authorized_client, only_dummy_service): assert data["service"] is None +def test_restart_nonexistent(authorized_client, only_dummy_service): + dummy_service = 
only_dummy_service + mutation_response = api_restart_by_name(authorized_client, "bogus_service") + data = get_data(mutation_response)["services"]["restartService"] + assert_notfound(data) + + assert data["service"] is None + + def test_stop_nonexistent(authorized_client, only_dummy_service): dummy_service = only_dummy_service mutation_response = api_stop_by_name(authorized_client, "bogus_service") From 9a3800ac7bc4aa0765cffcade144cefa3684fbe3 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 6 Oct 2023 13:17:48 +0000 Subject: [PATCH 136/246] test(service): moving errors --- selfprivacy_api/services/service.py | 2 + .../services/test_service/__init__.py | 13 +++- tests/test_graphql/test_services.py | 73 +++++++++++++++++++ 3 files changed, 85 insertions(+), 3 deletions(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 8446133..fbe0007 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -247,6 +247,8 @@ class Service(ABC): @abstractmethod def move_to_volume(self, volume: BlockDevice) -> Job: + """Cannot raise errors. 
+ Returns errors as an errored out Job instead.""" pass @classmethod diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index e4ed4cc..1cb5d9f 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -24,6 +24,7 @@ class DummyService(Service): folders: List[str] = [] startstop_delay = 0.0 backuppable = True + movable = True def __init_subclass__(cls, folders: List[str]): cls.folders = folders @@ -62,9 +63,9 @@ class DummyService(Service): domain = "test.com" return f"https://password.{domain}" - @staticmethod - def is_movable() -> bool: - return True + @classmethod + def is_movable(cls) -> bool: + return cls.movable @staticmethod def is_required() -> bool: @@ -137,6 +138,12 @@ class DummyService(Service): we can only set it up dynamically for tests via a classmethod""" cls.backuppable = new_value + @classmethod + def set_movable(cls, new_value: bool) -> None: + """For tests: because is_movale is static, + we can only set it up dynamically for tests via a classmethod""" + cls.movable = new_value + @classmethod def can_be_backed_up(cls) -> bool: """`True` if the service can be backed up.""" diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index 0b652c5..df409b9 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -110,6 +110,26 @@ allServices { } """ +API_MOVE_MUTATION = """ +mutation TestMoveService($input: MoveServiceInput!) 
{ + services { + moveService(input: $input) { + success + message + code + job { + uid + status + } + service { + id + status + } + } + } +} +""" + def assert_notfound(data): assert_errorcode(data, 404) @@ -166,6 +186,26 @@ def api_start_by_name(client, service_id: str) -> dict: return response +def api_move(client, service: Service, location: str) -> dict: + return api_move_by_name(client, service.get_id(), location) + + +def api_move_by_name(client, service_id: str, location: str) -> dict: + response = client.post( + "/graphql", + json={ + "query": API_MOVE_MUTATION, + "variables": { + "input": { + "serviceId": service_id, + "location": location, + } + }, + }, + ) + return response + + def api_restart(client, service: Service) -> dict: return api_restart_by_name(client, service.get_id()) @@ -327,6 +367,16 @@ def test_disable_unauthorized(client, only_dummy_service): assert mutation_response.json().get("data") is None +def test_move_nonexistent(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_move_by_name(authorized_client, "bogus_service", "sda1") + data = get_data(mutation_response)["services"]["moveService"] + assert_notfound(data) + + assert data["service"] is None + assert data["job"] is None + + def test_start_nonexistent(authorized_client, only_dummy_service): dummy_service = only_dummy_service mutation_response = api_start_by_name(authorized_client, "bogus_service") @@ -395,3 +445,26 @@ def test_stop_start(authorized_client, only_dummy_service): api_start(authorized_client, dummy_service) api_dummy_service = api_all_services(authorized_client)[0] assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + +def test_move_immovable(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + dummy_service.set_movable(False) + mutation_response = api_move(authorized_client, dummy_service, "sda1") + data = get_data(mutation_response)["services"]["moveService"] + 
assert_errorcode(data, 400) + + # is there a meaning in returning the service in this? + assert data["service"] is not None + assert data["job"] is None + + +def test_move_no_such_volume(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_move(authorized_client, dummy_service, "bogus_volume") + data = get_data(mutation_response)["services"]["moveService"] + assert_notfound(data) + + # is there a meaning in returning the service in this? + assert data["service"] is not None + assert data["job"] is None From 9d7857cb3fc6550d482b952eff764586ea8fb3c0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 9 Oct 2023 19:22:43 +0000 Subject: [PATCH 137/246] fix(blockdevices): handle nested lsblk --- selfprivacy_api/utils/block_devices.py | 132 ++++++++++++++----------- tests/test_block_device_utils.py | 86 +++++++++++----- 2 files changed, 132 insertions(+), 86 deletions(-) diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index 83fc28f..ab3794d 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -1,4 +1,5 @@ -"""Wrapper for block device functions.""" +"""A block device API wrapping lsblk""" +from __future__ import annotations import subprocess import json import typing @@ -11,6 +12,7 @@ def get_block_device(device_name): """ Return a block device by name. """ + # TODO: remove the function and related tests: dublicated by singleton lsblk_output = subprocess.check_output( [ "lsblk", @@ -43,22 +45,37 @@ class BlockDevice: A block device. 
""" - def __init__(self, block_device): - self.name = block_device["name"] - self.path = block_device["path"] - self.fsavail = str(block_device["fsavail"]) - self.fssize = str(block_device["fssize"]) - self.fstype = block_device["fstype"] - self.fsused = str(block_device["fsused"]) - self.mountpoints = block_device["mountpoints"] - self.label = block_device["label"] - self.uuid = block_device["uuid"] - self.size = str(block_device["size"]) - self.model = block_device["model"] - self.serial = block_device["serial"] - self.type = block_device["type"] + def __init__(self, device_dict: dict): + self.update_from_dict(device_dict) + + def update_from_dict(self, device_dict: dict): + self.name = device_dict["name"] + self.path = device_dict["path"] + self.fsavail = str(device_dict["fsavail"]) + self.fssize = str(device_dict["fssize"]) + self.fstype = device_dict["fstype"] + self.fsused = str(device_dict["fsused"]) + self.mountpoints = device_dict["mountpoints"] + self.label = device_dict["label"] + self.uuid = device_dict["uuid"] + self.size = str(device_dict["size"]) + self.model = device_dict["model"] + self.serial = device_dict["serial"] + self.type = device_dict["type"] self.locked = False + self.children: typing.List[BlockDevice] = [] + if "children" in device_dict.keys(): + for child in device_dict["children"]: + self.children.append(BlockDevice(child)) + + def all_children(self) -> typing.List[BlockDevice]: + result = [] + for child in self.children: + result.extend(child.all_children()) + result.append(child) + return result + def __str__(self): return self.name @@ -82,17 +99,7 @@ class BlockDevice: Update current data and return a dictionary of stats. 
""" device = get_block_device(self.name) - self.fsavail = str(device["fsavail"]) - self.fssize = str(device["fssize"]) - self.fstype = device["fstype"] - self.fsused = str(device["fsused"]) - self.mountpoints = device["mountpoints"] - self.label = device["label"] - self.uuid = device["uuid"] - self.size = str(device["size"]) - self.model = device["model"] - self.serial = device["serial"] - self.type = device["type"] + self.update_from_dict(device) return { "name": self.name, @@ -110,6 +117,14 @@ class BlockDevice: "type": self.type, } + def is_usable_partition(self): + # Ignore devices with type "rom" + if self.type == "rom": + return False + if self.fstype == "ext4": + return True + return False + def resize(self): """ Resize the block device. @@ -165,41 +180,16 @@ class BlockDevices(metaclass=SingletonMetaclass): """ Update the list of block devices. """ - devices = [] - lsblk_output = subprocess.check_output( - [ - "lsblk", - "-J", - "-b", - "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", - ] - ) - lsblk_output = lsblk_output.decode("utf-8") - lsblk_output = json.loads(lsblk_output) - for device in lsblk_output["blockdevices"]: - # Ignore devices with type "rom" - if device["type"] == "rom": - continue - # Ignore iso9660 devices - if device["fstype"] == "iso9660": - continue - if device["fstype"] is None: - if "children" in device: - for child in device["children"]: - if child["fstype"] == "ext4": - device = child - break - devices.append(device) - # Add new devices and delete non-existent devices + devices = BlockDevices.lsblk_devices() + + children = [] for device in devices: - if device["name"] not in [ - block_device.name for block_device in self.block_devices - ]: - self.block_devices.append(BlockDevice(device)) - for block_device in self.block_devices: - if block_device.name not in [device["name"] for device in devices]: - self.block_devices.remove(block_device) + children.extend(device.all_children()) + 
devices.extend(children) + + valid_devices = [device for device in devices if device.is_usable_partition()] + + self.block_devices = valid_devices def get_block_device(self, name: str) -> typing.Optional[BlockDevice]: """ @@ -236,3 +226,25 @@ class BlockDevices(metaclass=SingletonMetaclass): if "/" in block_device.mountpoints: return block_device raise RuntimeError("No root block device found") + + @staticmethod + def lsblk_device_dicts() -> typing.List[dict]: + lsblk_output_bytes = subprocess.check_output( + [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + ] + ) + lsblk_output = lsblk_output_bytes.decode("utf-8") + return json.loads(lsblk_output)["blockdevices"] + + @staticmethod + def lsblk_devices() -> typing.List[BlockDevice]: + devices = [] + for device in BlockDevices.lsblk_device_dicts(): + devices.append(device) + + return [BlockDevice(device) for device in devices] diff --git a/tests/test_block_device_utils.py b/tests/test_block_device_utils.py index f821e96..b41c89e 100644 --- a/tests/test_block_device_utils.py +++ b/tests/test_block_device_utils.py @@ -13,6 +13,7 @@ from selfprivacy_api.utils.block_devices import ( resize_block_device, ) from tests.common import read_json +from tests.test_common import dummy_service, raw_dummy_service SINGLE_LSBLK_OUTPUT = b""" { @@ -416,32 +417,37 @@ def lsblk_full_mock(mocker): def test_get_block_devices(lsblk_full_mock, authorized_client): block_devices = BlockDevices().get_block_devices() assert len(block_devices) == 2 - assert block_devices[0].name == "sda1" - assert block_devices[0].path == "/dev/sda1" - assert block_devices[0].fsavail == "4605702144" - assert block_devices[0].fssize == "19814920192" - assert block_devices[0].fstype == "ext4" - assert block_devices[0].fsused == "14353719296" - assert block_devices[0].mountpoints == ["/nix/store", "/"] - assert block_devices[0].label is None - assert block_devices[0].uuid == 
"ec80c004-baec-4a2c-851d-0e1807135511" - assert block_devices[0].size == "20210236928" - assert block_devices[0].model is None - assert block_devices[0].serial is None - assert block_devices[0].type == "part" - assert block_devices[1].name == "sdb" - assert block_devices[1].path == "/dev/sdb" - assert block_devices[1].fsavail == "11888545792" - assert block_devices[1].fssize == "12573614080" - assert block_devices[1].fstype == "ext4" - assert block_devices[1].fsused == "24047616" - assert block_devices[1].mountpoints == ["/volumes/sdb"] - assert block_devices[1].label is None - assert block_devices[1].uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751" - assert block_devices[1].size == "12884901888" - assert block_devices[1].model == "Volume" - assert block_devices[1].serial == "21378102" - assert block_devices[1].type == "disk" + devices_by_name = {device.name: device for device in block_devices} + sda1 = devices_by_name["sda1"] + sdb = devices_by_name["sdb"] + + assert sda1.name == "sda1" + assert sda1.path == "/dev/sda1" + assert sda1.fsavail == "4605702144" + assert sda1.fssize == "19814920192" + assert sda1.fstype == "ext4" + assert sda1.fsused == "14353719296" + assert sda1.mountpoints == ["/nix/store", "/"] + assert sda1.label is None + assert sda1.uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert sda1.size == "20210236928" + assert sda1.model is None + assert sda1.serial is None + assert sda1.type == "part" + + assert sdb.name == "sdb" + assert sdb.path == "/dev/sdb" + assert sdb.fsavail == "11888545792" + assert sdb.fssize == "12573614080" + assert sdb.fstype == "ext4" + assert sdb.fsused == "24047616" + assert sdb.mountpoints == ["/volumes/sdb"] + assert sdb.label is None + assert sdb.uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751" + assert sdb.size == "12884901888" + assert sdb.model == "Volume" + assert sdb.serial == "21378102" + assert sdb.type == "disk" def test_get_block_device(lsblk_full_mock, authorized_client): @@ -506,3 +512,31 @@ def 
test_get_root_block_device(lsblk_full_mock, authorized_client): assert block_device.model is None assert block_device.serial is None assert block_device.type == "part" + + +# Unassuming sanity check, yes this did fail +def test_get_real_devices(): + block_devices = BlockDevices().get_block_devices() + + assert block_devices is not None + assert len(block_devices) > 0 + + +# Unassuming sanity check +def test_get_real_root_device(): + BlockDevices().update() + devices = BlockDevices().get_block_devices() + try: + block_device = BlockDevices().get_root_block_device() + except Exception as e: + raise Exception("cannot get root device:", e, "devices found:", devices) + assert block_device is not None + assert block_device.name is not None + assert block_device.name != "" + + +def test_get_real_root_device_raw(authorized_client): + block_device = BlockDevices().get_root_block_device() + assert block_device is not None + assert block_device.name is not None + assert block_device.name != "" From a12126f6850e8e31ed4528edce2a794e27241370 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 11 Oct 2023 17:04:30 +0000 Subject: [PATCH 138/246] feature(service): error handling on moves --- .../graphql/mutations/services_mutations.py | 24 +++++++++++++------ 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/services_mutations.py b/selfprivacy_api/graphql/mutations/services_mutations.py index 86cab10..bac4d88 100644 --- a/selfprivacy_api/graphql/mutations/services_mutations.py +++ b/selfprivacy_api/graphql/mutations/services_mutations.py @@ -4,6 +4,7 @@ import typing import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.common_types.jobs import job_to_api_job +from selfprivacy_api.jobs import JobStatus from selfprivacy_api.graphql.common_types.service import ( Service, @@ -160,10 +161,19 @@ class ServicesMutations: service=service_to_graphql_service(service), ) job = 
service.move_to_volume(volume) - return ServiceJobMutationReturn( - success=True, - message="Service moved.", - code=200, - service=service_to_graphql_service(service), - job=job_to_api_job(job), - ) + if job.status == JobStatus.FINISHED: + return ServiceJobMutationReturn( + success=True, + message="Service moved.", + code=200, + service=service_to_graphql_service(service), + job=job_to_api_job(job), + ) + else: + return ServiceJobMutationReturn( + success=False, + message=f"Service move failure: {job.status_text}", + code=400, + service=service_to_graphql_service(service), + job=job_to_api_job(job), + ) From 9a1d82ec128046d0302a351ef322ecbca4c7763a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 11 Oct 2023 17:19:45 +0000 Subject: [PATCH 139/246] test(service): somewhat support moves for dummy service --- .../services/test_service/__init__.py | 42 ++++++++++++++++--- 1 file changed, 37 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 1cb5d9f..c75fc07 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -8,9 +8,10 @@ from os import path # from enum import Enum -from selfprivacy_api.jobs import Job +from selfprivacy_api.jobs import Job, Jobs, JobStatus from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.services.generic_service_mover import move_service, FolderMoveNames import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.test_service.icon import BITWARDEN_ICON @@ -25,6 +26,9 @@ class DummyService(Service): startstop_delay = 0.0 backuppable = True movable = True + # if False, we try to actually move + simulate_moving = True + drive = "sda1" def __init_subclass__(cls, folders: List[str]): cls.folders = folders @@ -161,6 +165,16 @@ class 
DummyService(Service): def set_delay(cls, new_delay_sec: float) -> None: cls.startstop_delay = new_delay_sec + @classmethod + def set_drive(cls, new_drive: str) -> None: + cls.drive = new_drive + + @classmethod + def set_simulated_moves(cls, enabled: bool) -> None: + """If True, this service will not actually call moving code + when moved""" + cls.simulate_moving = enabled + @classmethod def stop(cls): # simulate a failing service unable to stop @@ -197,9 +211,9 @@ class DummyService(Service): storage_usage = 0 return storage_usage - @staticmethod - def get_drive() -> str: - return "sda1" + @classmethod + def get_drive(cls) -> str: + return cls.drive @classmethod def get_folders(cls) -> List[str]: @@ -226,4 +240,22 @@ class DummyService(Service): ] def move_to_volume(self, volume: BlockDevice) -> Job: - pass + job = Jobs.add( + type_id=f"services.{self.get_id()}.move", + name=f"Move {self.get_display_name()}", + description=f"Moving {self.get_display_name()} data to {volume.name}", + ) + if self.simulate_moving is False: + # completely generic code, TODO: make it the default impl. 
+ move_service( + self, + volume, + job, + FolderMoveNames.default_foldermoves(self), + self.get_id(), + ) + else: + Jobs.update(job, status=JobStatus.FINISHED) + + self.set_drive(volume.name) + return job From c83b1a3442d98c101c5a58dfaf21786a8dc1ddee Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 11 Oct 2023 17:26:16 +0000 Subject: [PATCH 140/246] test(block devices): delete an extra update --- tests/test_block_device_utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_block_device_utils.py b/tests/test_block_device_utils.py index b41c89e..0fa99f1 100644 --- a/tests/test_block_device_utils.py +++ b/tests/test_block_device_utils.py @@ -524,7 +524,6 @@ def test_get_real_devices(): # Unassuming sanity check def test_get_real_root_device(): - BlockDevices().update() devices = BlockDevices().get_block_devices() try: block_device = BlockDevices().get_root_block_device() From aa287d9cf32e0a8dabb7d736c43ff571f0a8dfc9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 11 Oct 2023 17:33:03 +0000 Subject: [PATCH 141/246] test(services): try moving to the same device --- tests/data/turned_on.json | 48 ++++++++--------------------- tests/test_graphql/test_services.py | 18 +++++++++++ 2 files changed, 30 insertions(+), 36 deletions(-) diff --git a/tests/data/turned_on.json b/tests/data/turned_on.json index c6b758b..5b41501 100644 --- a/tests/data/turned_on.json +++ b/tests/data/turned_on.json @@ -1,11 +1,6 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": true - }, + "api": {"token": "TEST_TOKEN", "enableSwagger": false}, + "bitwarden": {"enable": true}, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", @@ -19,38 +14,19 @@ "ssh": { "enable": true, "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] + "rootKeys": ["ssh-ed25519 KEY test@pc"] }, "username": "tester", - "gitea": { - "enable": true - }, - "ocserv": { - "enable": true - 
}, - "pleroma": { - "enable": true - }, - "jitsi": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, + "gitea": {"enable": true}, + "ocserv": {"enable": true}, + "pleroma": {"enable": true}, + "jitsi": {"enable": true}, + "autoUpgrade": {"enable": true, "allowReboot": true}, + "useBinds": true, "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, + "sshKeys": ["ssh-rsa KEY test@pc"], + "dns": {"provider": "CLOUDFLARE", "apiKey": "TOKEN"}, + "server": {"provider": "HETZNER"}, "backup": { "provider": "BACKBLAZE", "accountId": "ID", diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index df409b9..2ab6a41 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -5,6 +5,7 @@ from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutatio import selfprivacy_api.services as service_module from selfprivacy_api.services.service import Service, ServiceStatus from selfprivacy_api.services.test_service import DummyService +from selfprivacy_api.utils.block_devices import BlockDevices import tests.test_graphql.test_api_backup from tests.test_common import raw_dummy_service, dummy_service @@ -468,3 +469,20 @@ def test_move_no_such_volume(authorized_client, only_dummy_service): # is there a meaning in returning the service in this? 
assert data["service"] is not None assert data["job"] is None + + +def test_move_same_volume(authorized_client, dummy_service): + # dummy_service = only_dummy_service + + # we need a drive that actually exists + root_volume = BlockDevices().get_root_block_device() + dummy_service.set_simulated_moves(False) + dummy_service.set_drive(root_volume.name) + + mutation_response = api_move(authorized_client, dummy_service, root_volume.name) + data = get_data(mutation_response)["services"]["moveService"] + assert_errorcode(data, 400) + + # is there a meaning in returning the service in this? + assert data["service"] is not None + assert data["job"] is not None From 267cdd391b8728326b1c683b61777813188da95c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 11 Oct 2023 17:34:53 +0000 Subject: [PATCH 142/246] fix(backup): do not store maybe unpicklable service on the queue --- selfprivacy_api/backup/tasks.py | 7 ++++++- .../graphql/mutations/backup_mutations.py | 2 +- tests/test_graphql/test_backup.py | 16 +++++++++++++--- 3 files changed, 20 insertions(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index 5b36252..a948bff 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -11,7 +11,9 @@ from selfprivacy_api.graphql.common_types.backup import ( from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey from huey import crontab + from selfprivacy_api.services.service import Service +from selfprivacy_api.services import get_service_by_id from selfprivacy_api.backup import Backups from selfprivacy_api.jobs import Jobs, JobStatus, Job @@ -34,11 +36,14 @@ def validate_datetime(dt: datetime) -> bool: # huey tasks need to return something @huey.task() def start_backup( - service: Service, reason: BackupReason = BackupReason.EXPLICIT + service_id: str, reason: BackupReason = BackupReason.EXPLICIT ) -> bool: """ The worker task that starts the backup 
process. """ + service = get_service_by_id(service_id) + if service is None: + raise ValueError(f"No such service: {service_id}") Backups.back_up(service, reason) return True diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index cc1538e..820564c 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -148,7 +148,7 @@ class BackupMutations: ) job = add_backup_job(service) - start_backup(service) + start_backup(service_id) return GenericJobMutationReturn( success=True, diff --git a/tests/test_graphql/test_backup.py b/tests/test_graphql/test_backup.py index 187ce11..bb9e217 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_graphql/test_backup.py @@ -14,6 +14,8 @@ from selfprivacy_api.utils.huey import huey import tempfile +from selfprivacy_api.utils.huey import huey + from tests.test_common import dummy_service, raw_dummy_service from selfprivacy_api.services import Service, get_all_services @@ -69,7 +71,15 @@ def backups_local(tmpdir): @pytest.fixture(scope="function") def backups(tmpdir): - # for those tests that are supposed to pass with any repo + """ + For those tests that are supposed to pass with + both local and cloud repos + """ + + # Sometimes this is false. Idk why. 
+ huey.immediate = True + assert huey.immediate is True + Backups.reset() if BACKUP_PROVIDER_ENVS["kind"] in os.environ.keys(): Backups.set_provider_from_envs() @@ -736,7 +746,7 @@ def simulated_service_stopping_delay(request) -> float: def test_backup_service_task(backups, dummy_service, simulated_service_stopping_delay): dummy_service.set_delay(simulated_service_stopping_delay) - handle = start_backup(dummy_service) + handle = start_backup(dummy_service.get_id()) handle(blocking=True) snaps = Backups.get_snapshots(dummy_service) @@ -781,7 +791,7 @@ def test_backup_larger_file(backups, dummy_service): mega = 2**20 make_large_file(dir, 100 * mega) - handle = start_backup(dummy_service) + handle = start_backup(dummy_service.get_id()) handle(blocking=True) # results will be slightly different on different machines. if someone has troubles with it on their machine, consider dropping this test. From 0b10c083af014858961d9800122d1d041b795b99 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 20 Oct 2023 15:34:14 +0000 Subject: [PATCH 143/246] test(services): test double enables and disables --- tests/test_graphql/test_services.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index 2ab6a41..e46ea33 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -448,6 +448,35 @@ def test_stop_start(authorized_client, only_dummy_service): assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value +def test_disable_enable(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["isEnabled"] is True + + # attempting to enable an already enableed service + api_enable(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["isEnabled"] is True + assert 
api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + api_disable(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["isEnabled"] is False + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + # attempting to disable an already disableped service + api_disable(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["isEnabled"] is False + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + api_enable(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["isEnabled"] is True + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + def test_move_immovable(authorized_client, only_dummy_service): dummy_service = only_dummy_service dummy_service.set_movable(False) From 23cc33b9d9604f45adf044c8bd6492b663645461 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 20 Oct 2023 15:38:19 +0000 Subject: [PATCH 144/246] test(services): delete redundant rest tests from bitwarden --- .../services/test_bitwarden.py | 54 ------------------- 1 file changed, 54 deletions(-) diff --git a/tests/test_rest_endpoints/services/test_bitwarden.py b/tests/test_rest_endpoints/services/test_bitwarden.py index 3977253..f3e3674 100644 --- a/tests/test_rest_endpoints/services/test_bitwarden.py +++ b/tests/test_rest_endpoints/services/test_bitwarden.py @@ -43,60 +43,6 @@ def bitwarden_undefined(mocker, datadir): ############################################################################### -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_unauthorized(client, bitwarden_off, endpoint): - response = client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 401 - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_illegal_methods(authorized_client, bitwarden_off, endpoint): - 
response = authorized_client.get(f"/services/bitwarden/{endpoint}") - assert response.status_code == 405 - response = authorized_client.put(f"/services/bitwarden/{endpoint}") - assert response.status_code == 405 - response = authorized_client.delete(f"/services/bitwarden/{endpoint}") - assert response.status_code == 405 - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_off(authorized_client, bitwarden_off, endpoint, target_file): - response = authorized_client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 200 - assert read_json(bitwarden_off / "turned_off.json") == read_json( - bitwarden_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_on(authorized_client, bitwarden_on, endpoint, target_file): - response = authorized_client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 200 - assert read_json(bitwarden_on / "turned_on.json") == read_json( - bitwarden_on / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_twice(authorized_client, bitwarden_off, endpoint, target_file): - response = authorized_client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 200 - response = authorized_client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 200 - assert read_json(bitwarden_off / "turned_off.json") == read_json( - bitwarden_off / target_file - ) - - @pytest.mark.parametrize( "endpoint,target_file", [("enable", "turned_on.json"), ("disable", "turned_off.json")], From e1083f32212801a9b48720048767e8ed864cc76d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 20 Oct 2023 16:05:12 +0000 Subject: [PATCH 145/246] refactor(services): make a default 
implementation of enable/disable --- selfprivacy_api/services/service.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index fbe0007..636b7f8 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -12,6 +12,7 @@ from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.owned_path import OwnedPath from selfprivacy_api import utils from selfprivacy_api.utils.waitloop import wait_until_true +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain DEFAULT_START_STOP_TIMEOUT = 5 * 60 @@ -137,17 +138,24 @@ class Service(ABC): """The status of the service, reported by systemd.""" pass - @staticmethod - @abstractmethod - def enable(): + # But they do not really enable? + @classmethod + def enable(cls): """Enable the service. Usually this means enabling systemd unit.""" - pass + name = cls.get_id() + with WriteUserData() as user_data: + if "gitea" not in user_data: + user_data[name] = {} + user_data[name]["enable"] = True - @staticmethod - @abstractmethod - def disable(): + @classmethod + def disable(cls): """Disable the service. 
Usually this means disabling systemd unit.""" - pass + name = cls.get_id() + with WriteUserData() as user_data: + if "gitea" not in user_data: + user_data[name] = {} + user_data[name]["enable"] = False @staticmethod @abstractmethod From 708c5cbc98e0d5310e032b364c694407e3084f64 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 20 Oct 2023 16:06:30 +0000 Subject: [PATCH 146/246] refactor(services): delete enable/disable from gitea --- selfprivacy_api/services/gitea/__init__.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index 08f223e..f2aa6d0 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -71,22 +71,6 @@ class Gitea(Service): """ return get_service_status("gitea.service") - @staticmethod - def enable(): - """Enable Gitea service.""" - with WriteUserData() as user_data: - if "gitea" not in user_data: - user_data["gitea"] = {} - user_data["gitea"]["enable"] = True - - @staticmethod - def disable(): - """Disable Gitea service.""" - with WriteUserData() as user_data: - if "gitea" not in user_data: - user_data["gitea"] = {} - user_data["gitea"]["enable"] = False - @staticmethod def stop(): subprocess.run(["systemctl", "stop", "gitea.service"]) From 6f035dc0db8ff83226547f419957110e1d0276bc Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 20 Oct 2023 16:14:43 +0000 Subject: [PATCH 147/246] refactor(services): add default implementation to get_enabled --- selfprivacy_api/services/service.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index 636b7f8..eca366f 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -126,11 +126,12 @@ class Service(ABC): """ pass - @staticmethod - @abstractmethod - def is_enabled() -> bool: + @classmethod + def is_enabled(cls) -> 
bool: """`True` if the service is enabled.""" - pass + name = cls.get_id() + with ReadUserData() as user_data: + return user_data.get(name, {}).get("enable", False) @staticmethod @abstractmethod From c7be9c7427fe1b51b5fe40d90c0a46edea5012c4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 20 Oct 2023 16:21:20 +0000 Subject: [PATCH 148/246] refactor(services): delete is_enabled() from gitea --- selfprivacy_api/services/gitea/__init__.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index f2aa6d0..9b6f80f 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -54,11 +54,6 @@ class Gitea(Service): def get_backup_description() -> str: return "Git repositories, database and user data." - @staticmethod - def is_enabled() -> bool: - with ReadUserData() as user_data: - return user_data.get("gitea", {}).get("enable", False) - @staticmethod def get_status() -> ServiceStatus: """ From 0078ed0c3a911a0666fce2cde8c11fd662c88fa5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 20 Oct 2023 16:31:52 +0000 Subject: [PATCH 149/246] refactor(services): delete xxenablexx functions from jitsi --- selfprivacy_api/services/jitsi/__init__.py | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsi/__init__.py index fed6f33..d5677cc 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsi/__init__.py @@ -55,33 +55,12 @@ class Jitsi(Service): def get_backup_description() -> str: return "Secrets that are used to encrypt the communication." 
- @staticmethod - def is_enabled() -> bool: - with ReadUserData() as user_data: - return user_data.get("jitsi", {}).get("enable", False) - @staticmethod def get_status() -> ServiceStatus: return get_service_status_from_several_units( ["jitsi-videobridge.service", "jicofo.service"] ) - @staticmethod - def enable(): - """Enable Jitsi service.""" - with WriteUserData() as user_data: - if "jitsi" not in user_data: - user_data["jitsi"] = {} - user_data["jitsi"]["enable"] = True - - @staticmethod - def disable(): - """Disable Gitea service.""" - with WriteUserData() as user_data: - if "jitsi" not in user_data: - user_data["jitsi"] = {} - user_data["jitsi"]["enable"] = False - @staticmethod def stop(): subprocess.run( From d7c75e0aa8a5a3de0d8ce8c6d199538e93e0f56c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 20 Oct 2023 17:02:43 +0000 Subject: [PATCH 150/246] fix(services): do not randomly exit the huey immediate mode --- tests/test_common.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/test_common.py b/tests/test_common.py index 0bcd4bc..5da43e9 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -5,6 +5,7 @@ import os import pytest from selfprivacy_api.utils import WriteUserData, ReadUserData +from selfprivacy_api.utils.huey import huey from os import path from os import makedirs @@ -53,6 +54,9 @@ def dummy_service(tmpdir, raw_dummy_service) -> Generator[Service, None, None]: # register our service services.services.append(service) + huey.immediate = True + assert huey.immediate is True + assert get_service_by_id(service.get_id()) is not None yield service From 1a65545c290ab50c8106a8316e6997ca1ff33c4f Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 4 Dec 2023 15:35:50 +0000 Subject: [PATCH 151/246] test(backups, token_repo): move non-graphql tests out of graphql --- tests/{test_graphql => }/test_backup.py | 0 tests/test_graphql/test_api_backup.py | 4 ++-- tests/test_graphql/test_services.py | 2 +- tests/{test_graphql => 
}/test_localsecret.py | 0 .../test_repository/test_json_tokens_repository.py | 0 .../test_json_tokens_repository/empty_keys.json | 9 +++++++++ .../test_json_tokens_repository/null_keys.json | 0 .../test_json_tokens_repository/tokens.json | 0 .../test_repository/test_tokens_repository.py | 0 .../test_tokens_repository/empty_keys.json | 9 +++++++++ .../test_tokens_repository/null_keys.json | 0 .../test_repository/test_tokens_repository/tokens.json | 0 12 files changed, 21 insertions(+), 3 deletions(-) rename tests/{test_graphql => }/test_backup.py (100%) rename tests/{test_graphql => }/test_localsecret.py (100%) rename tests/{test_graphql => }/test_repository/test_json_tokens_repository.py (100%) create mode 100644 tests/test_repository/test_json_tokens_repository/empty_keys.json rename tests/{test_graphql => }/test_repository/test_json_tokens_repository/null_keys.json (100%) rename tests/{test_graphql => }/test_repository/test_json_tokens_repository/tokens.json (100%) rename tests/{test_graphql => }/test_repository/test_tokens_repository.py (100%) create mode 100644 tests/test_repository/test_tokens_repository/empty_keys.json rename tests/{test_graphql => }/test_repository/test_tokens_repository/null_keys.json (100%) rename tests/{test_graphql => }/test_repository/test_tokens_repository/tokens.json (100%) diff --git a/tests/test_graphql/test_backup.py b/tests/test_backup.py similarity index 100% rename from tests/test_graphql/test_backup.py rename to tests/test_backup.py diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index bc4b7f1..50d65d8 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -1,6 +1,6 @@ from os import path -from tests.test_graphql.test_backup import backups -from tests.test_graphql.test_backup import raw_dummy_service, dummy_service +from tests.test_backup import backups +from tests.test_common import raw_dummy_service, dummy_service from tests.common import 
generate_backup_query diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index e46ea33..aa8d2f3 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -10,7 +10,7 @@ from selfprivacy_api.utils.block_devices import BlockDevices import tests.test_graphql.test_api_backup from tests.test_common import raw_dummy_service, dummy_service from tests.common import generate_service_query -from tests.test_graphql.common import assert_ok, get_data +from tests.test_graphql.test_api_backup import assert_ok, get_data @pytest.fixture() diff --git a/tests/test_graphql/test_localsecret.py b/tests/test_localsecret.py similarity index 100% rename from tests/test_graphql/test_localsecret.py rename to tests/test_localsecret.py diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository.py b/tests/test_repository/test_json_tokens_repository.py similarity index 100% rename from tests/test_graphql/test_repository/test_json_tokens_repository.py rename to tests/test_repository/test_json_tokens_repository.py diff --git a/tests/test_repository/test_json_tokens_repository/empty_keys.json b/tests/test_repository/test_json_tokens_repository/empty_keys.json new file mode 100644 index 0000000..2131ddf --- /dev/null +++ b/tests/test_repository/test_json_tokens_repository/empty_keys.json @@ -0,0 +1,9 @@ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + } + ] +} diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository/null_keys.json b/tests/test_repository/test_json_tokens_repository/null_keys.json similarity index 100% rename from tests/test_graphql/test_repository/test_json_tokens_repository/null_keys.json rename to tests/test_repository/test_json_tokens_repository/null_keys.json diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository/tokens.json 
b/tests/test_repository/test_json_tokens_repository/tokens.json similarity index 100% rename from tests/test_graphql/test_repository/test_json_tokens_repository/tokens.json rename to tests/test_repository/test_json_tokens_repository/tokens.json diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_repository/test_tokens_repository.py similarity index 100% rename from tests/test_graphql/test_repository/test_tokens_repository.py rename to tests/test_repository/test_tokens_repository.py diff --git a/tests/test_repository/test_tokens_repository/empty_keys.json b/tests/test_repository/test_tokens_repository/empty_keys.json new file mode 100644 index 0000000..2131ddf --- /dev/null +++ b/tests/test_repository/test_tokens_repository/empty_keys.json @@ -0,0 +1,9 @@ +{ + "tokens": [ + { + "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", + "name": "primary_token", + "date": "2022-07-15 17:41:31.675698" + } + ] +} diff --git a/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json b/tests/test_repository/test_tokens_repository/null_keys.json similarity index 100% rename from tests/test_graphql/test_repository/test_tokens_repository/null_keys.json rename to tests/test_repository/test_tokens_repository/null_keys.json diff --git a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json b/tests/test_repository/test_tokens_repository/tokens.json similarity index 100% rename from tests/test_graphql/test_repository/test_tokens_repository/tokens.json rename to tests/test_repository/test_tokens_repository/tokens.json From 2e59e7e880f85345a367eee437fdf15e3f8db207 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 6 Dec 2023 13:57:39 +0000 Subject: [PATCH 152/246] better error reporting in graphql tests --- tests/test_graphql/common.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/test_graphql/common.py b/tests/test_graphql/common.py index d473433..1a415bc 100644 --- 
a/tests/test_graphql/common.py +++ b/tests/test_graphql/common.py @@ -25,7 +25,13 @@ def assert_empty(response): def assert_data(response): assert response.status_code == 200 - data = response.json().get("data") + response = response.json() + + if ( + "errors" in response.keys() + ): # convenience for debugging, this will display error + raise ValueError(response["errors"]) + data = response.get("data") assert data is not None return data From f5999516fa3722922ac507940deca75469ce9664 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 14:47:57 +0000 Subject: [PATCH 153/246] feature(services): better error reporting in disable and enable service --- .../graphql/mutations/services_mutations.py | 39 ++++++++++++++----- 1 file changed, 29 insertions(+), 10 deletions(-) diff --git a/selfprivacy_api/graphql/mutations/services_mutations.py b/selfprivacy_api/graphql/mutations/services_mutations.py index bac4d88..ad3b1b9 100644 --- a/selfprivacy_api/graphql/mutations/services_mutations.py +++ b/selfprivacy_api/graphql/mutations/services_mutations.py @@ -48,14 +48,22 @@ class ServicesMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def enable_service(self, service_id: str) -> ServiceMutationReturn: """Enable service.""" - service = get_service_by_id(service_id) - if service is None: + try: + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.enable() + except Exception as e: return ServiceMutationReturn( success=False, - message="Service not found.", - code=404, + message=format_error(e), + code=400, ) - service.enable() + return ServiceMutationReturn( success=True, message="Service enabled.", @@ -66,14 +74,21 @@ class ServicesMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def disable_service(self, service_id: str) -> ServiceMutationReturn: """Disable service.""" - service = 
get_service_by_id(service_id) - if service is None: + try: + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.disable() + except Exception as e: return ServiceMutationReturn( success=False, - message="Service not found.", - code=404, + message=format_error(e), + code=400, ) - service.disable() return ServiceMutationReturn( success=True, message="Service disabled.", @@ -177,3 +192,7 @@ class ServicesMutations: service=service_to_graphql_service(service), job=job_to_api_job(job), ) + + +def format_error(e: Exception) -> str: + return type(e).__name__ + ": " + str(e) From 368ab22fbb67dd0d69d9cc5ce716fac7317bb6fb Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 14:50:53 +0000 Subject: [PATCH 154/246] fix(services): replace stray gitea reference with a generic identifier in deiable/enable --- selfprivacy_api/services/service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index eca366f..dc7579d 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -145,7 +145,7 @@ class Service(ABC): """Enable the service. Usually this means enabling systemd unit.""" name = cls.get_id() with WriteUserData() as user_data: - if "gitea" not in user_data: + if name not in user_data: user_data[name] = {} user_data[name]["enable"] = True @@ -154,7 +154,7 @@ class Service(ABC): """Disable the service. 
Usually this means disabling systemd unit.""" name = cls.get_id() with WriteUserData() as user_data: - if "gitea" not in user_data: + if name not in user_data: user_data[name] = {} user_data[name]["enable"] = False From 5c1dd93931648960dcfe5285472607202b85d214 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 14:52:33 +0000 Subject: [PATCH 155/246] test(services): test that undisableable services are handled correctly --- tests/test_graphql/test_services.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index aa8d2f3..a266f63 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -3,6 +3,7 @@ from typing import Generator from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations import selfprivacy_api.services as service_module +from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service, ServiceStatus from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.utils.block_devices import BlockDevices @@ -515,3 +516,18 @@ def test_move_same_volume(authorized_client, dummy_service): # is there a meaning in returning the service in this? 
assert data["service"] is not None assert data["job"] is not None + + +def test_mailservice_cannot_enable_disable(authorized_client): + mailservice = get_service_by_id("email") + + mutation_response = api_enable(authorized_client, mailservice) + data = get_data(mutation_response)["services"]["enableService"] + assert_errorcode(data, 400) + # TODO?: we cannot convert mailservice to graphql Service without /var/domain yet + # assert data["service"] is not None + + mutation_response = api_disable(authorized_client, mailservice) + data = get_data(mutation_response)["services"]["disableService"] + assert_errorcode(data, 400) + # assert data["service"] is not None From ffc60fc8b4863bf9704e7965311dd683fceb018e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 15:26:21 +0000 Subject: [PATCH 156/246] test(services): use actual json enabling and disabling --- .../services/test_service/__init__.py | 34 ------------------- tests/test_common.py | 5 ++- 2 files changed, 4 insertions(+), 35 deletions(-) diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index c75fc07..1e315f5 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -37,8 +37,6 @@ class DummyService(Service): super().__init__() with open(self.status_file(), "w") as file: file.write(ServiceStatus.ACTIVE.value) - with open(self.enabled_file(), "w") as file: - file.write("True") @staticmethod def get_id() -> str: @@ -79,36 +77,12 @@ class DummyService(Service): def get_backup_description() -> str: return "How did we get here?" 
- @classmethod - def is_enabled(cls) -> bool: - return cls.get_enabled() - @classmethod def status_file(cls) -> str: dir = cls.folders[0] # we do not REALLY want to store our state in our declared folders return path.join(dir, "..", "service_status") - @classmethod - def enabled_file(cls) -> str: - dir = cls.folders[0] - return path.join(dir, "..", "service_enabled") - - @classmethod - def get_enabled(cls) -> bool: - with open(cls.enabled_file(), "r") as file: - string = file.read().strip() - if "True" in string: - return True - if "False" in string: - return False - raise ValueError("test service enabled/disabled status file got corrupted") - - @classmethod - def set_enabled(cls, enabled: bool): - with open(cls.enabled_file(), "w") as file: - status_string = file.write(str(enabled)) - @classmethod def set_status(cls, status: ServiceStatus): with open(cls.status_file(), "w") as file: @@ -153,14 +127,6 @@ class DummyService(Service): """`True` if the service can be backed up.""" return cls.backuppable - @classmethod - def enable(cls): - cls.set_enabled(True) - - @classmethod - def disable(cls): - cls.set_enabled(False) - @classmethod def set_delay(cls, new_delay_sec: float) -> None: cls.startstop_delay = new_delay_sec diff --git a/tests/test_common.py b/tests/test_common.py index 5da43e9..5c433a0 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -48,7 +48,9 @@ def raw_dummy_service(tmpdir): @pytest.fixture() -def dummy_service(tmpdir, raw_dummy_service) -> Generator[Service, None, None]: +def dummy_service( + tmpdir, raw_dummy_service, generic_userdata +) -> Generator[Service, None, None]: service = raw_dummy_service # register our service @@ -58,6 +60,7 @@ def dummy_service(tmpdir, raw_dummy_service) -> Generator[Service, None, None]: assert huey.immediate is True assert get_service_by_id(service.get_id()) is not None + service.enable() yield service # cleanup because apparently it matters wrt tasks From 22f157b6ff38bae971f4f0f52c32bd78d8373ade Mon 
Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 16:02:57 +0000 Subject: [PATCH 157/246] test(services): add a test that we actually read json --- tests/test_graphql/test_services.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index a266f63..0a84122 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -1,12 +1,14 @@ import pytest from typing import Generator +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.block_devices import BlockDevices + from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations import selfprivacy_api.services as service_module from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service, ServiceStatus from selfprivacy_api.services.test_service import DummyService -from selfprivacy_api.utils.block_devices import BlockDevices import tests.test_graphql.test_api_backup from tests.test_common import raw_dummy_service, dummy_service @@ -531,3 +533,12 @@ def test_mailservice_cannot_enable_disable(authorized_client): data = get_data(mutation_response)["services"]["disableService"] assert_errorcode(data, 400) # assert data["service"] is not None + + +def enabling_disabling_reads_json(dummy_service: DummyService): + with WriteUserData() as data: + data[dummy_service.get_id()]["enabled"] = False + assert dummy_service.is_enabled() is False + with WriteUserData() as data: + data[dummy_service.get_id()]["enabled"] = True + assert dummy_service.is_enabled() is True From 834e8c060331854755a40bb9acbc1c0629e98a63 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 16:08:47 +0000 Subject: [PATCH 158/246] test(services): add a test that we actually write json --- tests/test_graphql/test_services.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git 
a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index 0a84122..f55a488 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -542,3 +542,15 @@ def enabling_disabling_reads_json(dummy_service: DummyService): with WriteUserData() as data: data[dummy_service.get_id()]["enabled"] = True assert dummy_service.is_enabled() is True + + +def enabling_disabling_writes_json(dummy_service: DummyService): + dummy_service.disable() + with ReadUserData() as data: + assert data[dummy_service.get_id()]["enabled"] is False + dummy_service.enable() + with ReadUserData() as data: + assert data[dummy_service.get_id()]["enabled"] is True + dummy_service.disable() + with ReadUserData() as data: + assert data[dummy_service.get_id()]["enabled"] is False From bf0b774295e294bbba54b308c57e8017d6645fb6 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 16:33:21 +0000 Subject: [PATCH 159/246] test(services): fix last tests being not enabled, and a field typo0 --- tests/test_graphql/test_services.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index f55a488..d67c053 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -535,22 +535,23 @@ def test_mailservice_cannot_enable_disable(authorized_client): # assert data["service"] is not None -def enabling_disabling_reads_json(dummy_service: DummyService): +def test_enabling_disabling_reads_json(dummy_service: DummyService): with WriteUserData() as data: - data[dummy_service.get_id()]["enabled"] = False + data[dummy_service.get_id()]["enable"] = False assert dummy_service.is_enabled() is False with WriteUserData() as data: - data[dummy_service.get_id()]["enabled"] = True + data[dummy_service.get_id()]["enable"] = True assert dummy_service.is_enabled() is True -def enabling_disabling_writes_json(dummy_service: DummyService): 
+def test_enabling_disabling_writes_json(dummy_service: DummyService): + dummy_service.disable() with ReadUserData() as data: - assert data[dummy_service.get_id()]["enabled"] is False + assert data[dummy_service.get_id()]["enable"] is False dummy_service.enable() with ReadUserData() as data: - assert data[dummy_service.get_id()]["enabled"] is True + assert data[dummy_service.get_id()]["enable"] is True dummy_service.disable() with ReadUserData() as data: - assert data[dummy_service.get_id()]["enabled"] is False + assert data[dummy_service.get_id()]["enable"] is False From bcf57ea738ea9a715d1051262a019cc08502be58 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 16:36:26 +0000 Subject: [PATCH 160/246] test(services): test possibly undefined json fields. On writing --- tests/test_graphql/test_services.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index d67c053..8a88ef7 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -544,7 +544,23 @@ def test_enabling_disabling_reads_json(dummy_service: DummyService): assert dummy_service.is_enabled() is True -def test_enabling_disabling_writes_json(dummy_service: DummyService): +@pytest.fixture(params=["normally_enabled", "deleted_attribute", "service_not_in_json"]) +def possibly_dubiously_enabled_service( + dummy_service: DummyService, request +) -> DummyService: + if request.param == "deleted_attribute": + with WriteUserData() as data: + del data[dummy_service.get_id()]["enable"] + if request.param == "service_not_in_json": + with WriteUserData() as data: + del data[dummy_service.get_id()] + return dummy_service + + +def test_enabling_disabling_writes_json( + possibly_dubiously_enabled_service: DummyService, +): + dummy_service = possibly_dubiously_enabled_service dummy_service.disable() with ReadUserData() as data: From 
9d3fd45c2c98c035554f141ae54e81f5ebb9274a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 16:51:07 +0000 Subject: [PATCH 161/246] =?UTF-8?q?test=EE=81=91(services):=20missing=20in?= =?UTF-8?q?fo=20on=20service=20enabled=20status=20returns=20False?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- selfprivacy_api/services/service.py | 7 ++++++- tests/test_graphql/test_services.py | 19 +++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index dc7579d..a53c028 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -128,7 +128,12 @@ class Service(ABC): @classmethod def is_enabled(cls) -> bool: - """`True` if the service is enabled.""" + """ + `True` if the service is enabled. + `False` if it is not enabled or not defined in file + If there is nothing in the file, this is equivalent to False + because NixOS won't enable it then. + """ name = cls.get_id() with ReadUserData() as user_data: return user_data.get(name, {}).get("enable", False) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index 8a88ef7..f28f204 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -557,6 +557,25 @@ def possibly_dubiously_enabled_service( return dummy_service +# Yeah, idk yet how to dry it. 
+@pytest.fixture(params=["deleted_attribute", "service_not_in_json"]) +def undefined_enabledness_service(dummy_service: DummyService, request) -> DummyService: + if request.param == "deleted_attribute": + with WriteUserData() as data: + del data[dummy_service.get_id()]["enable"] + if request.param == "service_not_in_json": + with WriteUserData() as data: + del data[dummy_service.get_id()] + return dummy_service + + +def test_undefined_enabledness_in_json_means_False( + undefined_enabledness_service: DummyService, +): + dummy_service = undefined_enabledness_service + assert dummy_service.is_enabled() is False + + def test_enabling_disabling_writes_json( possibly_dubiously_enabled_service: DummyService, ): From c1cc1e00ed0dfbee325ee1e9db87b0278638ccb3 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 17:26:00 +0000 Subject: [PATCH 162/246] test(services): move non-gql enable+json tests out of gql tests towards backend tests --- tests/test_graphql/test_services.py | 60 --------------------------- tests/test_services.py | 64 ++++++++++++++++++++++++++++- 2 files changed, 62 insertions(+), 62 deletions(-) diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index f28f204..e86d070 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -1,16 +1,13 @@ import pytest from typing import Generator -from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.utils.block_devices import BlockDevices -from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations import selfprivacy_api.services as service_module from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service, ServiceStatus from selfprivacy_api.services.test_service import DummyService -import tests.test_graphql.test_api_backup from tests.test_common import raw_dummy_service, dummy_service from tests.common import generate_service_query from 
tests.test_graphql.test_api_backup import assert_ok, get_data @@ -533,60 +530,3 @@ def test_mailservice_cannot_enable_disable(authorized_client): data = get_data(mutation_response)["services"]["disableService"] assert_errorcode(data, 400) # assert data["service"] is not None - - -def test_enabling_disabling_reads_json(dummy_service: DummyService): - with WriteUserData() as data: - data[dummy_service.get_id()]["enable"] = False - assert dummy_service.is_enabled() is False - with WriteUserData() as data: - data[dummy_service.get_id()]["enable"] = True - assert dummy_service.is_enabled() is True - - -@pytest.fixture(params=["normally_enabled", "deleted_attribute", "service_not_in_json"]) -def possibly_dubiously_enabled_service( - dummy_service: DummyService, request -) -> DummyService: - if request.param == "deleted_attribute": - with WriteUserData() as data: - del data[dummy_service.get_id()]["enable"] - if request.param == "service_not_in_json": - with WriteUserData() as data: - del data[dummy_service.get_id()] - return dummy_service - - -# Yeah, idk yet how to dry it. 
-@pytest.fixture(params=["deleted_attribute", "service_not_in_json"]) -def undefined_enabledness_service(dummy_service: DummyService, request) -> DummyService: - if request.param == "deleted_attribute": - with WriteUserData() as data: - del data[dummy_service.get_id()]["enable"] - if request.param == "service_not_in_json": - with WriteUserData() as data: - del data[dummy_service.get_id()] - return dummy_service - - -def test_undefined_enabledness_in_json_means_False( - undefined_enabledness_service: DummyService, -): - dummy_service = undefined_enabledness_service - assert dummy_service.is_enabled() is False - - -def test_enabling_disabling_writes_json( - possibly_dubiously_enabled_service: DummyService, -): - dummy_service = possibly_dubiously_enabled_service - - dummy_service.disable() - with ReadUserData() as data: - assert data[dummy_service.get_id()]["enable"] is False - dummy_service.enable() - with ReadUserData() as data: - assert data[dummy_service.get_id()]["enable"] is True - dummy_service.disable() - with ReadUserData() as data: - assert data[dummy_service.get_id()]["enable"] is False diff --git a/tests/test_services.py b/tests/test_services.py index 3eef0cd..3addf05 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -1,8 +1,12 @@ """ Tests for generic service methods """ +import pytest from pytest import raises +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.waitloop import wait_until_true + from selfprivacy_api.services.bitwarden import Bitwarden from selfprivacy_api.services.pleroma import Pleroma from selfprivacy_api.services.owned_path import OwnedPath @@ -10,9 +14,8 @@ from selfprivacy_api.services.generic_service_mover import FolderMoveNames from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.services.service import Service, ServiceStatus, StoppedService -from selfprivacy_api.utils.waitloop import wait_until_true -from tests.test_common import 
raw_dummy_service +from tests.test_common import raw_dummy_service, dummy_service def test_unimplemented_folders_raises(): @@ -87,3 +90,60 @@ def test_foldermoves_from_ownedpaths(): group="vaultwarden", owner="vaultwarden", ) + + +def test_enabling_disabling_reads_json(dummy_service: DummyService): + with WriteUserData() as data: + data[dummy_service.get_id()]["enable"] = False + assert dummy_service.is_enabled() is False + with WriteUserData() as data: + data[dummy_service.get_id()]["enable"] = True + assert dummy_service.is_enabled() is True + + +@pytest.fixture(params=["normally_enabled", "deleted_attribute", "service_not_in_json"]) +def possibly_dubiously_enabled_service( + dummy_service: DummyService, request +) -> DummyService: + if request.param == "deleted_attribute": + with WriteUserData() as data: + del data[dummy_service.get_id()]["enable"] + if request.param == "service_not_in_json": + with WriteUserData() as data: + del data[dummy_service.get_id()] + return dummy_service + + +# Yeah, idk yet how to dry it. 
+@pytest.fixture(params=["deleted_attribute", "service_not_in_json"]) +def undefined_enabledness_service(dummy_service: DummyService, request) -> DummyService: + if request.param == "deleted_attribute": + with WriteUserData() as data: + del data[dummy_service.get_id()]["enable"] + if request.param == "service_not_in_json": + with WriteUserData() as data: + del data[dummy_service.get_id()] + return dummy_service + + +def test_undefined_enabledness_in_json_means_False( + undefined_enabledness_service: DummyService, +): + dummy_service = undefined_enabledness_service + assert dummy_service.is_enabled() is False + + +def test_enabling_disabling_writes_json( + possibly_dubiously_enabled_service: DummyService, +): + dummy_service = possibly_dubiously_enabled_service + + dummy_service.disable() + with ReadUserData() as data: + assert data[dummy_service.get_id()]["enable"] is False + dummy_service.enable() + with ReadUserData() as data: + assert data[dummy_service.get_id()]["enable"] is True + dummy_service.disable() + with ReadUserData() as data: + assert data[dummy_service.get_id()]["enable"] is False From 29870652314c6c2af10a3fef6742e3051ec55645 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 17:31:13 +0000 Subject: [PATCH 163/246] test(services): remove bitwarden tests because redundant --- .../services/test_bitwarden.py | 71 ------------------- .../test_bitwarden/enable_undefined.json | 56 --------------- .../services/test_bitwarden/turned_off.json | 57 --------------- .../services/test_bitwarden/turned_on.json | 57 --------------- .../services/test_bitwarden/undefined.json | 54 -------------- 5 files changed, 295 deletions(-) delete mode 100644 tests/test_rest_endpoints/services/test_bitwarden.py delete mode 100644 tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json delete mode 100644 tests/test_rest_endpoints/services/test_bitwarden/turned_off.json delete mode 100644 
tests/test_rest_endpoints/services/test_bitwarden/turned_on.json delete mode 100644 tests/test_rest_endpoints/services/test_bitwarden/undefined.json diff --git a/tests/test_rest_endpoints/services/test_bitwarden.py b/tests/test_rest_endpoints/services/test_bitwarden.py deleted file mode 100644 index f3e3674..0000000 --- a/tests/test_rest_endpoints/services/test_bitwarden.py +++ /dev/null @@ -1,71 +0,0 @@ -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r") as f: - return json.load(f) - - -############################################################################### - - -@pytest.fixture -def bitwarden_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert read_json(datadir / "turned_off.json")["bitwarden"]["enable"] == False - return datadir - - -@pytest.fixture -def bitwarden_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_on.json")["bitwarden"]["enable"] == True - return datadir - - -@pytest.fixture -def bitwarden_enable_undefined(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json" - ) - assert "enable" not in read_json(datadir / "enable_undefined.json")["bitwarden"] - return datadir - - -@pytest.fixture -def bitwarden_undefined(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "bitwarden" not in read_json(datadir / "undefined.json") - return datadir - - -############################################################################### - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_attribute_deleted( - authorized_client, bitwarden_enable_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/bitwarden/{endpoint}") - assert 
response.status_code == 200 - assert read_json(bitwarden_enable_undefined / "enable_undefined.json") == read_json( - bitwarden_enable_undefined / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_bitwarden_undefined( - authorized_client, bitwarden_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 200 - assert read_json(bitwarden_undefined / "undefined.json") == read_json( - bitwarden_undefined / target_file - ) diff --git a/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json deleted file mode 100644 index 1a95e85..0000000 --- a/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json 
b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json deleted file mode 100644 index c1691ea..0000000 --- a/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json deleted file mode 100644 index 42999d8..0000000 --- a/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": true - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" 
- ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden/undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json deleted file mode 100644 index ee288c2..0000000 --- a/tests/test_rest_endpoints/services/test_bitwarden/undefined.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file From 5214d5e462c19a1905bd6e0c15a665844330c130 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 18:13:07 +0000 Subject: [PATCH 164/246] test(services): add unauthorized move test --- 
tests/test_graphql/api_common.py | 89 ------------------------- tests/test_graphql/common.py | 26 ++++---- tests/test_graphql/test_api_backup.py | 2 +- tests/test_graphql/test_api_devices.py | 27 ++++---- tests/test_graphql/test_api_recovery.py | 46 ++++++++----- tests/test_graphql/test_services.py | 42 ++++-------- 6 files changed, 72 insertions(+), 160 deletions(-) delete mode 100644 tests/test_graphql/api_common.py diff --git a/tests/test_graphql/api_common.py b/tests/test_graphql/api_common.py deleted file mode 100644 index 4e4aec2..0000000 --- a/tests/test_graphql/api_common.py +++ /dev/null @@ -1,89 +0,0 @@ -from tests.common import generate_api_query -from tests.conftest import TOKENS_FILE_CONTENTS, DEVICE_WE_AUTH_TESTS_WITH - -ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] - - -def assert_ok(response, request): - data = assert_data(response) - assert data[request]["success"] is True - assert data[request]["message"] is not None - assert data[request]["code"] == 200 - - -def assert_errorcode(response, request, code): - data = assert_data(response) - assert data[request]["success"] is False - assert data[request]["message"] is not None - assert data[request]["code"] == code - - -def assert_empty(response): - assert response.status_code == 200 - assert response.json().get("data") is None - - -def assert_data(response): - assert response.status_code == 200 - data = response.json().get("data") - assert data is not None - assert "api" in data.keys() - return data["api"] - - -API_DEVICES_QUERY = """ -devices { - creationDate - isCaller - name -} -""" - - -def request_devices(client): - return client.post( - "/graphql", - json={"query": generate_api_query([API_DEVICES_QUERY])}, - ) - - -def graphql_get_devices(client): - response = request_devices(client) - data = assert_data(response) - devices = data["devices"] - assert devices is not None - return devices - - -def set_client_token(client, token): - client.headers.update({"Authorization": "Bearer " + token}) - 
- -def assert_token_valid(client, token): - set_client_token(client, token) - assert graphql_get_devices(client) is not None - - -def assert_same(graphql_devices, abstract_devices): - """Orderless comparison""" - assert len(graphql_devices) == len(abstract_devices) - for original_device in abstract_devices: - assert original_device["name"] in [device["name"] for device in graphql_devices] - for device in graphql_devices: - if device["name"] == original_device["name"]: - assert device["creationDate"] == original_device["date"].isoformat() - - -def assert_original(client): - devices = graphql_get_devices(client) - assert_original_devices(devices) - - -def assert_original_devices(devices): - assert_same(devices, ORIGINAL_DEVICES) - - for device in devices: - if device["name"] == DEVICE_WE_AUTH_TESTS_WITH["name"]: - assert device["isCaller"] is True - else: - assert device["isCaller"] is False diff --git a/tests/test_graphql/common.py b/tests/test_graphql/common.py index 1a415bc..286df67 100644 --- a/tests/test_graphql/common.py +++ b/tests/test_graphql/common.py @@ -4,18 +4,20 @@ from tests.conftest import TOKENS_FILE_CONTENTS, DEVICE_WE_AUTH_TESTS_WITH ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] -def assert_ok(response, request): - data = assert_data(response) - data[request]["success"] is True - data[request]["message"] is not None - data[request]["code"] == 200 +def assert_ok(output: dict) -> None: + if output["success"] is False: + # convenience for debugging, this should display error + # if message is empty, consider adding helpful messages + raise ValueError(output["code"], output["message"]) + assert output["success"] is True + assert output["message"] is not None + assert output["code"] == 200 -def assert_errorcode(response, request, code): - data = assert_data(response) - data[request]["success"] is False - data[request]["message"] is not None - data[request]["code"] == code +def assert_errorcode(output: dict, code) -> None: + assert output["success"] 
is False + assert output["message"] is not None + assert output["code"] == code def assert_empty(response): @@ -23,7 +25,7 @@ def assert_empty(response): assert response.json().get("data") is None -def assert_data(response): +def get_data(response): assert response.status_code == 200 response = response.json() @@ -54,7 +56,7 @@ def request_devices(client): def graphql_get_devices(client): response = request_devices(client) - data = assert_data(response) + data = get_data(response) devices = data["api"]["devices"] assert devices is not None return devices diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 50d65d8..675c1b8 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -280,7 +280,7 @@ def get_data(response): if ( "errors" in response.keys() ): # convenience for debugging, this will display error - assert response["errors"] == [] + raise ValueError(response["errors"]) assert response["data"] is not None data = response["data"] return data diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index b24bc7f..ef77414 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -8,8 +8,8 @@ from tests.common import ( generate_api_query, ) from tests.conftest import DEVICE_WE_AUTH_TESTS_WITH, TOKENS_FILE_CONTENTS -from tests.test_graphql.api_common import ( - assert_data, +from tests.test_graphql.common import ( + get_data, assert_empty, assert_ok, assert_errorcode, @@ -36,7 +36,7 @@ def graphql_get_new_device_key(authorized_client) -> str: "/graphql", json={"query": NEW_DEVICE_KEY_MUTATION}, ) - assert_ok(response, "getNewDeviceApiKey") + assert_ok(get_data(response)["api"]["getNewDeviceApiKey"]) key = response.json()["data"]["api"]["getNewDeviceApiKey"]["key"] assert key.split(" ").__len__() == 12 @@ -60,9 +60,10 @@ def graphql_try_auth_new_device(client, mnemonic_key, device_name): def 
graphql_authorize_new_device(client, mnemonic_key, device_name) -> str: response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") - assert_ok(response, "authorizeWithNewDeviceApiKey") + assert_ok(get_data(response)["api"]["authorizeWithNewDeviceApiKey"]) token = response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["token"] assert_token_valid(client, token) + return token def test_graphql_tokens_info(authorized_client, tokens_file): @@ -114,7 +115,7 @@ def test_graphql_delete_token(authorized_client, tokens_file): }, }, ) - assert_ok(response, "deleteDeviceApiToken") + assert_ok(get_data(response)["api"]["deleteDeviceApiToken"]) devices = graphql_get_devices(authorized_client) assert_same(devices, test_devices) @@ -130,7 +131,7 @@ def test_graphql_delete_self_token(authorized_client, tokens_file): }, }, ) - assert_errorcode(response, "deleteDeviceApiToken", 400) + assert_errorcode(get_data(response)["api"]["deleteDeviceApiToken"], 400) assert_original(authorized_client) @@ -147,7 +148,7 @@ def test_graphql_delete_nonexistent_token( }, }, ) - assert_errorcode(response, "deleteDeviceApiToken", 404) + assert_errorcode(get_data(response)["api"]["deleteDeviceApiToken"], 404) assert_original(authorized_client) @@ -180,7 +181,7 @@ def test_graphql_refresh_token(authorized_client, client, tokens_file): "/graphql", json={"query": REFRESH_TOKEN_MUTATION}, ) - assert_ok(response, "refreshDeviceApiToken") + assert_ok(get_data(response)["api"]["refreshDeviceApiToken"]) new_token = response.json()["data"]["api"]["refreshDeviceApiToken"]["token"] assert_token_valid(client, new_token) @@ -250,10 +251,10 @@ def test_graphql_get_and_delete_new_device_key(client, authorized_client, tokens "/graphql", json={"query": INVALIDATE_NEW_DEVICE_KEY_MUTATION}, ) - assert_ok(response, "invalidateNewDeviceApiKey") + assert_ok(get_data(response)["api"]["invalidateNewDeviceApiKey"]) response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") - 
assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) + assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404) AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """ @@ -285,7 +286,7 @@ def test_graphql_authorize_new_device_with_invalid_key( client, authorized_client, tokens_file ): response = graphql_try_auth_new_device(client, "invalid_token", "new_device") - assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) + assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404) assert_original(authorized_client) @@ -297,7 +298,7 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi devices = graphql_get_devices(authorized_client) response = graphql_try_auth_new_device(client, mnemonic_key, "new_device2") - assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) + assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404) assert graphql_get_devices(authorized_client) == devices @@ -309,7 +310,7 @@ def test_graphql_get_and_authorize_key_after_12_minutes( mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, NearFuture) response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") - assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) + assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404) def test_graphql_authorize_without_token( diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index 629bac0..f53394f 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -18,9 +18,9 @@ from tests.common import five_minutes_into_future_naive_utc as five_minutes_into from tests.common import five_minutes_into_future as five_minutes_into_future_tz from tests.common import five_minutes_into_past_naive_utc as five_minutes_into_past -from tests.test_graphql.api_common import ( +from tests.test_graphql.common import ( assert_empty, - 
assert_data, + get_data, assert_ok, assert_errorcode, assert_token_valid, @@ -49,9 +49,9 @@ def request_recovery_status(client): def graphql_recovery_status(client): response = request_recovery_status(client) - data = assert_data(response) + data = get_data(response) - status = data["recoveryKey"] + status = data["api"]["recoveryKey"] assert status is not None return status @@ -74,8 +74,10 @@ def request_make_new_recovery_key(client, expires_at=None, uses=None): def graphql_make_new_recovery_key(client, expires_at=None, uses=None): response = request_make_new_recovery_key(client, expires_at, uses) - assert_ok(response, "getNewRecoveryApiKey") - key = response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] + output = get_data(response)["api"]["getNewRecoveryApiKey"] + assert_ok(output) + + key = output["key"] assert key is not None assert key.split(" ").__len__() == 18 return key @@ -98,8 +100,10 @@ def request_recovery_auth(client, key, device_name): def graphql_use_recovery_key(client, key, device_name): response = request_recovery_auth(client, key, device_name) - assert_ok(response, "useRecoveryApiKey") - token = response.json()["data"]["api"]["useRecoveryApiKey"]["token"] + output = get_data(response)["api"]["useRecoveryApiKey"] + assert_ok(output) + + token = output["token"] assert token is not None assert_token_valid(client, token) set_client_token(client, token) @@ -198,8 +202,10 @@ def test_graphql_use_recovery_key_after_expiration( mock = mocker.patch(RECOVERY_KEY_VALIDATION_DATETIME, NearFuture) response = request_recovery_auth(client, key, "new_test_token3") - assert_errorcode(response, "useRecoveryApiKey", 404) - assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is None + output = get_data(response)["api"]["useRecoveryApiKey"] + assert_errorcode(output, 404) + + assert output["token"] is None assert_original(authorized_client) status = graphql_recovery_status(authorized_client) @@ -222,8 +228,10 @@ def 
test_graphql_generate_recovery_key_with_expiration_in_the_past( authorized_client, expires_at=expiration_date ) - assert_errorcode(response, "getNewRecoveryApiKey", 400) - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None + output = get_data(response)["api"]["getNewRecoveryApiKey"] + assert_errorcode(output, 400) + + assert output["key"] is None assert graphql_recovery_status(authorized_client)["exists"] is False @@ -280,7 +288,8 @@ def test_graphql_generate_recovery_key_with_limited_uses( assert status["usesLeft"] == 0 response = request_recovery_auth(client, mnemonic_key, "new_test_token3") - assert_errorcode(response, "useRecoveryApiKey", 404) + output = get_data(response)["api"]["useRecoveryApiKey"] + assert_errorcode(output, 404) def test_graphql_generate_recovery_key_with_negative_uses( @@ -288,13 +297,16 @@ def test_graphql_generate_recovery_key_with_negative_uses( ): response = request_make_new_recovery_key(authorized_client, uses=-1) - assert_errorcode(response, "getNewRecoveryApiKey", 400) - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None + output = get_data(response)["api"]["getNewRecoveryApiKey"] + assert_errorcode(output, 400) + assert output["key"] is None + assert graphql_recovery_status(authorized_client)["exists"] is False def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): response = request_make_new_recovery_key(authorized_client, uses=0) - assert_errorcode(response, "getNewRecoveryApiKey", 400) - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None + output = get_data(response)["api"]["getNewRecoveryApiKey"] + assert_errorcode(output, 400) + assert output["key"] is None assert graphql_recovery_status(authorized_client)["exists"] is False diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index e86d070..bd3e373 100644 --- a/tests/test_graphql/test_services.py +++ 
b/tests/test_graphql/test_services.py @@ -10,7 +10,7 @@ from selfprivacy_api.services.test_service import DummyService from tests.test_common import raw_dummy_service, dummy_service from tests.common import generate_service_query -from tests.test_graphql.test_api_backup import assert_ok, get_data +from tests.test_graphql.common import assert_empty, assert_ok, get_data @pytest.fixture() @@ -330,52 +330,38 @@ def test_allservices_unauthorized(client, only_dummy_service): def test_start_unauthorized(client, only_dummy_service): dummy_service = only_dummy_service - mutation_response = api_start(client, dummy_service) - - assert mutation_response.status_code == 200 - assert mutation_response.json().get("data") is None + response = api_start(client, dummy_service) + assert_empty(response) def test_restart_unauthorized(client, only_dummy_service): dummy_service = only_dummy_service - mutation_response = api_restart(client, dummy_service) - - assert mutation_response.status_code == 200 - assert mutation_response.json().get("data") is None + response = api_restart(client, dummy_service) + assert_empty(response) def test_stop_unauthorized(client, only_dummy_service): dummy_service = only_dummy_service - mutation_response = api_stop(client, dummy_service) - - assert mutation_response.status_code == 200 - assert mutation_response.json().get("data") is None + response = api_stop(client, dummy_service) + assert_empty(response) def test_enable_unauthorized(client, only_dummy_service): dummy_service = only_dummy_service - mutation_response = api_enable(client, dummy_service) - - assert mutation_response.status_code == 200 - assert mutation_response.json().get("data") is None + response = api_enable(client, dummy_service) + assert_empty(response) def test_disable_unauthorized(client, only_dummy_service): dummy_service = only_dummy_service - mutation_response = api_disable(client, dummy_service) - - assert mutation_response.status_code == 200 - assert 
mutation_response.json().get("data") is None + response = api_disable(client, dummy_service) + assert_empty(response) -def test_move_nonexistent(authorized_client, only_dummy_service): +def test_move_unauthorized(client, only_dummy_service): dummy_service = only_dummy_service - mutation_response = api_move_by_name(authorized_client, "bogus_service", "sda1") - data = get_data(mutation_response)["services"]["moveService"] - assert_notfound(data) - - assert data["service"] is None - assert data["job"] is None + response = api_move(client, dummy_service, "sda1") + assert_empty(response) def test_start_nonexistent(authorized_client, only_dummy_service): From 7038d69069917f5db6ae7541b5d93ac53bc1cc58 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 22 Nov 2023 18:19:03 +0000 Subject: [PATCH 165/246] test(services): remove redundant gitea tests --- .../services/test_gitea.py | 121 ------------------ .../services/test_gitea/enable_undefined.json | 56 -------- .../services/test_gitea/turned_off.json | 57 --------- .../services/test_gitea/turned_on.json | 57 --------- .../services/test_gitea/undefined.json | 54 -------- 5 files changed, 345 deletions(-) delete mode 100644 tests/test_rest_endpoints/services/test_gitea.py delete mode 100644 tests/test_rest_endpoints/services/test_gitea/enable_undefined.json delete mode 100644 tests/test_rest_endpoints/services/test_gitea/turned_off.json delete mode 100644 tests/test_rest_endpoints/services/test_gitea/turned_on.json delete mode 100644 tests/test_rest_endpoints/services/test_gitea/undefined.json diff --git a/tests/test_rest_endpoints/services/test_gitea.py b/tests/test_rest_endpoints/services/test_gitea.py deleted file mode 100644 index 0a50c19..0000000 --- a/tests/test_rest_endpoints/services/test_gitea.py +++ /dev/null @@ -1,121 +0,0 @@ -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r") as f: - return json.load(f) - - 
-############################################################################### - - -@pytest.fixture -def gitea_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert read_json(datadir / "turned_off.json")["gitea"]["enable"] == False - return datadir - - -@pytest.fixture -def gitea_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_on.json")["gitea"]["enable"] == True - return datadir - - -@pytest.fixture -def gitea_enable_undefined(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json" - ) - assert "enable" not in read_json(datadir / "enable_undefined.json")["gitea"] - return datadir - - -@pytest.fixture -def gitea_undefined(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "gitea" not in read_json(datadir / "undefined.json") - return datadir - - -############################################################################### - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_unauthorized(client, gitea_off, endpoint): - response = client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 401 - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_illegal_methods(authorized_client, gitea_off, endpoint): - response = authorized_client.get(f"/services/gitea/{endpoint}") - assert response.status_code == 405 - response = authorized_client.put(f"/services/gitea/{endpoint}") - assert response.status_code == 405 - response = authorized_client.delete(f"/services/gitea/{endpoint}") - assert response.status_code == 405 - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_off(authorized_client, gitea_off, endpoint, target_file): - response = 
authorized_client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 200 - assert read_json(gitea_off / "turned_off.json") == read_json( - gitea_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_on(authorized_client, gitea_on, endpoint, target_file): - response = authorized_client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 200 - assert read_json(gitea_on / "turned_on.json") == read_json(gitea_on / target_file) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_twice(authorized_client, gitea_off, endpoint, target_file): - response = authorized_client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 200 - response = authorized_client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 200 - assert read_json(gitea_off / "turned_off.json") == read_json( - gitea_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_attribute_deleted( - authorized_client, gitea_enable_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 200 - assert read_json(gitea_enable_undefined / "enable_undefined.json") == read_json( - gitea_enable_undefined / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_gitea_undefined(authorized_client, gitea_undefined, endpoint, target_file): - response = authorized_client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 200 - assert read_json(gitea_undefined / "undefined.json") == read_json( - gitea_undefined / target_file - ) diff --git 
a/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json deleted file mode 100644 index f9fb878..0000000 --- a/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/turned_off.json b/tests/test_rest_endpoints/services/test_gitea/turned_off.json deleted file mode 100644 index c1691ea..0000000 --- a/tests/test_rest_endpoints/services/test_gitea/turned_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": 
true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/turned_on.json b/tests/test_rest_endpoints/services/test_gitea/turned_on.json deleted file mode 100644 index f9a1eaf..0000000 --- a/tests/test_rest_endpoints/services/test_gitea/turned_on.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": true - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/undefined.json 
b/tests/test_rest_endpoints/services/test_gitea/undefined.json deleted file mode 100644 index a50a070..0000000 --- a/tests/test_rest_endpoints/services/test_gitea/undefined.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file From 9f04729296e6e95913c5035c4393f42e5fb46354 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 24 Nov 2023 11:26:13 +0000 Subject: [PATCH 166/246] test(services, system): untie dkim tests from rest --- selfprivacy_api/utils/__init__.py | 4 +- tests/data/domain | 1 + tests/test_dkim.py | 119 ++++++++++++++++++ tests/test_graphql/test_system.py | 24 ++++ .../services/test_mailserver.py | 102 --------------- 5 files changed, 147 insertions(+), 103 deletions(-) create mode 100644 tests/data/domain create mode 100644 tests/test_dkim.py delete mode 100644 tests/test_rest_endpoints/services/test_mailserver.py diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 40ed5b6..5263b89 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -6,6 
+6,7 @@ import json import os import subprocess import portalocker +import typing USERDATA_FILE = "/etc/nixos/userdata/userdata.json" @@ -166,9 +167,10 @@ def parse_date(date_str: str) -> datetime.datetime: raise ValueError("Invalid date string") -def get_dkim_key(domain, parse=True): +def get_dkim_key(domain: str, parse: bool = True) -> typing.Optional[str]: """Get DKIM key from /var/dkim/.selector.txt""" if os.path.exists("/var/dkim/" + domain + ".selector.txt"): + # Is this really neccessary to use Popen here? cat_process = subprocess.Popen( ["cat", "/var/dkim/" + domain + ".selector.txt"], stdout=subprocess.PIPE ) diff --git a/tests/data/domain b/tests/data/domain new file mode 100644 index 0000000..3679d0d --- /dev/null +++ b/tests/data/domain @@ -0,0 +1 @@ +test-domain.tld \ No newline at end of file diff --git a/tests/test_dkim.py b/tests/test_dkim.py new file mode 100644 index 0000000..c9662d0 --- /dev/null +++ b/tests/test_dkim.py @@ -0,0 +1,119 @@ +import pytest +import typing + +from os import path +from unittest.mock import DEFAULT +from tests.conftest import global_data_dir + +from selfprivacy_api.utils import get_dkim_key, get_domain +import selfprivacy_api.utils as utils + +############################################################################### + + +class ProcessMock: + """Mock subprocess.Popen""" + + def __init__(self, args, **kwargs): + self.args = args + self.kwargs = kwargs + + def communicate(): + return ( + b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector for test-domain.tld\n', + None, + ) + + +class NoFileMock(ProcessMock): + def communicate(): + return (b"", None) + + +def _path_exists_with_masked_paths(filepath, masked_paths: typing.List[str]): + if filepath in masked_paths: + 
return False + else: + # this will cause the mocker to return the standard path.exists output + # see https://docs.python.org/3/library/unittest.mock.html#unittest.mock.Mock.side_effect + return DEFAULT + + +def path_exists_func_but_with_masked_paths(masked_paths: typing.List[str]): + """ + Sometimes we do not want to pretend that no files exist at all, but that only specific files do not exist + This provides the needed path.exists function for some arbitrary list of masked paths + """ + return lambda x: _path_exists_with_masked_paths(x, masked_paths) + + +@pytest.fixture +def mock_all_paths_exist(mocker): + mock = mocker.patch("os.path.exists", autospec=True, return_value=True) + return mock + + +@pytest.fixture +def mock_subproccess_popen_dkimfile(mocker): + mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) + return mock + + +@pytest.fixture +def mock_subproccess_popen(mocker): + mock = mocker.patch("subprocess.Popen", autospec=True, return_value=NoFileMock) + return mock + + +@pytest.fixture +def domain_file(mocker): + # TODO: move to conftest. 
Challenge: it does not behave with "/" like pytest datadir does + domain_path = path.join(global_data_dir(), "domain") + mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", domain_path) + return domain_path + + +@pytest.fixture +def mock_no_dkim_file(mocker): + """ + Should have domain mocks + """ + domain = utils.get_domain() + # try: + # domain = get_domain() + # except Exception as e: + # domain = "" + + masked_files = ["/var/dkim/" + domain + ".selector.txt"] + mock = mocker.patch( + "os.path.exists", + side_effect=path_exists_func_but_with_masked_paths(masked_files), + ) + return mock + + +############################################################################### + + +def test_get_dkim_key( + mock_subproccess_popen_dkimfile, mock_all_paths_exist, domain_file +): + """Test DKIM key""" + dkim_key = get_dkim_key("test-domain.tld") + assert ( + dkim_key + == "v=DKIM1; k=rsa; p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" + ) + assert mock_subproccess_popen_dkimfile.call_args[0][0] == [ + "cat", + "/var/dkim/test-domain.tld.selector.txt", + ] + + +def test_no_dkim_key( + authorized_client, domain_file, mock_no_dkim_file, mock_subproccess_popen +): + """Test no DKIM key""" + dkim_key = get_dkim_key("test-domain.tld") + assert dkim_key is None + assert mock_subproccess_popen.called == False diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index ed00268..b6b4362 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -6,6 +6,7 @@ import pytest from tests.common import generate_system_query, read_json from tests.test_graphql.common import assert_empty +from tests.test_dkim import mock_no_dkim_file @pytest.fixture @@ -332,6 +333,29 @@ def test_graphql_get_domain( ) +def test_graphql_get_domain_no_dkim( + authorized_client, + 
domain_file, + mock_get_ip4, + mock_get_ip6, + mock_no_dkim_file, + turned_on, +): + """Test no DKIM file situation gets properly handled""" + response = authorized_client.post( + "/graphql", + json={ + "query": generate_system_query([API_GET_DOMAIN_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + dns_records = response.json()["data"]["system"]["domainInfo"]["requiredDnsRecords"] + for record in dns_records: + if record["name"] == "selector._domainkey": + raise ValueError("unexpected record found:", record) + + API_GET_TIMEZONE = """ settings { timezone diff --git a/tests/test_rest_endpoints/services/test_mailserver.py b/tests/test_rest_endpoints/services/test_mailserver.py deleted file mode 100644 index 2803683..0000000 --- a/tests/test_rest_endpoints/services/test_mailserver.py +++ /dev/null @@ -1,102 +0,0 @@ -import base64 -import json -import pytest - -from selfprivacy_api.utils import get_dkim_key - -############################################################################### - - -class ProcessMock: - """Mock subprocess.Popen""" - - def __init__(self, args, **kwargs): - self.args = args - self.kwargs = kwargs - - def communicate(): - return ( - b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector for example.com\n', - None, - ) - - -class NoFileMock(ProcessMock): - def communicate(): - return (b"", None) - - -@pytest.fixture -def mock_subproccess_popen(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) - mocker.patch( - "selfprivacy_api.rest.services.get_domain", - autospec=True, - return_value="example.com", - ) - mocker.patch("os.path.exists", autospec=True, return_value=True) - return mock - - -@pytest.fixture 
-def mock_no_file(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=NoFileMock) - mocker.patch( - "selfprivacy_api.rest.services.get_domain", - autospec=True, - return_value="example.com", - ) - mocker.patch("os.path.exists", autospec=True, return_value=False) - return mock - - -############################################################################### - - -def test_unauthorized(client, mock_subproccess_popen): - """Test unauthorized""" - response = client.get("/services/mailserver/dkim") - assert response.status_code == 401 - - -def test_illegal_methods(authorized_client, mock_subproccess_popen): - response = authorized_client.post("/services/mailserver/dkim") - assert response.status_code == 405 - response = authorized_client.put("/services/mailserver/dkim") - assert response.status_code == 405 - response = authorized_client.delete("/services/mailserver/dkim") - assert response.status_code == 405 - - -def test_get_dkim_key(mock_subproccess_popen): - """Test DKIM key""" - dkim_key = get_dkim_key("example.com") - assert ( - dkim_key - == "v=DKIM1; k=rsa; p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" - ) - assert mock_subproccess_popen.call_args[0][0] == [ - "cat", - "/var/dkim/example.com.selector.txt", - ] - - -def test_dkim_key(authorized_client, mock_subproccess_popen): - """Test old REST DKIM key endpoint""" - response = authorized_client.get("/services/mailserver/dkim") - assert response.status_code == 200 - assert ( - base64.b64decode(response.text) - == b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector 
for example.com\n' - ) - assert mock_subproccess_popen.call_args[0][0] == [ - "cat", - "/var/dkim/example.com.selector.txt", - ] - - -def test_no_dkim_key(authorized_client, mock_no_file): - """Test no DKIM key""" - response = authorized_client.get("/services/mailserver/dkim") - assert response.status_code == 404 - assert mock_no_file.called == False From e63acc6d5629571d2970bc580d259fc2e348a656 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 24 Nov 2023 11:31:49 +0000 Subject: [PATCH 167/246] test(services): remove redundant nextcloud tests --- .../services/test_nextcloud.py | 123 ------------------ .../test_nextcloud/enable_undefined.json | 56 -------- .../services/test_nextcloud/turned_off.json | 57 -------- .../services/test_nextcloud/turned_on.json | 57 -------- .../services/test_nextcloud/undefined.json | 49 ------- 5 files changed, 342 deletions(-) delete mode 100644 tests/test_rest_endpoints/services/test_nextcloud.py delete mode 100644 tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json delete mode 100644 tests/test_rest_endpoints/services/test_nextcloud/turned_off.json delete mode 100644 tests/test_rest_endpoints/services/test_nextcloud/turned_on.json delete mode 100644 tests/test_rest_endpoints/services/test_nextcloud/undefined.json diff --git a/tests/test_rest_endpoints/services/test_nextcloud.py b/tests/test_rest_endpoints/services/test_nextcloud.py deleted file mode 100644 index b05c363..0000000 --- a/tests/test_rest_endpoints/services/test_nextcloud.py +++ /dev/null @@ -1,123 +0,0 @@ -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r") as f: - return json.load(f) - - -############################################################################### - - -@pytest.fixture -def nextcloud_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert read_json(datadir / "turned_off.json")["nextcloud"]["enable"] == False - return datadir - - 
-@pytest.fixture -def nextcloud_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_on.json")["nextcloud"]["enable"] == True - return datadir - - -@pytest.fixture -def nextcloud_enable_undefined(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json" - ) - assert "enable" not in read_json(datadir / "enable_undefined.json")["nextcloud"] - return datadir - - -@pytest.fixture -def nextcloud_undefined(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "nextcloud" not in read_json(datadir / "undefined.json") - return datadir - - -############################################################################### - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_unauthorized(client, nextcloud_off, endpoint): - response = client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 401 - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_illegal_methods(authorized_client, nextcloud_off, endpoint): - response = authorized_client.get(f"/services/nextcloud/{endpoint}") - assert response.status_code == 405 - response = authorized_client.put(f"/services/nextcloud/{endpoint}") - assert response.status_code == 405 - response = authorized_client.delete(f"/services/nextcloud/{endpoint}") - assert response.status_code == 405 - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_off(authorized_client, nextcloud_off, endpoint, target_file): - response = authorized_client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 200 - assert read_json(nextcloud_off / "turned_off.json") == read_json( - nextcloud_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", 
"turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_on(authorized_client, nextcloud_on, endpoint, target_file): - response = authorized_client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 200 - assert read_json(nextcloud_on / "turned_on.json") == read_json( - nextcloud_on / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_twice(authorized_client, nextcloud_off, endpoint, target_file): - response = authorized_client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 200 - response = authorized_client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 200 - assert read_json(nextcloud_off / "turned_off.json") == read_json( - nextcloud_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_attribute_deleted( - authorized_client, nextcloud_enable_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 200 - assert read_json(nextcloud_enable_undefined / "enable_undefined.json") == read_json( - nextcloud_enable_undefined / target_file - ) - - -@pytest.mark.parametrize("endpoint,target", [("enable", True), ("disable", False)]) -def test_on_nextcloud_undefined( - authorized_client, nextcloud_undefined, endpoint, target -): - response = authorized_client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 200 - assert ( - read_json(nextcloud_undefined / "undefined.json")["nextcloud"]["enable"] - == target - ) diff --git a/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json deleted file mode 100644 index 19f1f2d..0000000 --- 
a/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN" - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json deleted file mode 100644 index b80ad9e..0000000 --- a/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - 
"enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json deleted file mode 100644 index c1691ea..0000000 --- a/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json deleted file mode 100644 index 46c09f3..0000000 --- a/tests/test_rest_endpoints/services/test_nextcloud/undefined.json +++ /dev/null @@ 
-1,49 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file From 15eafbb524a770fe723f7cab95b1a80fe61fba77 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 24 Nov 2023 11:34:06 +0000 Subject: [PATCH 168/246] test(services): remove redundant ocserv tests --- .../services/test_ocserv.py | 123 ------------------ .../test_ocserv/enable_undefined.json | 56 -------- .../services/test_ocserv/turned_off.json | 57 -------- .../services/test_ocserv/turned_on.json | 57 -------- .../services/test_ocserv/undefined.json | 54 -------- 5 files changed, 347 deletions(-) delete mode 100644 tests/test_rest_endpoints/services/test_ocserv.py delete mode 100644 tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json delete mode 100644 tests/test_rest_endpoints/services/test_ocserv/turned_off.json delete mode 100644 tests/test_rest_endpoints/services/test_ocserv/turned_on.json delete mode 100644 tests/test_rest_endpoints/services/test_ocserv/undefined.json diff --git a/tests/test_rest_endpoints/services/test_ocserv.py b/tests/test_rest_endpoints/services/test_ocserv.py deleted file mode 100644 index 8f43e70..0000000 --- 
a/tests/test_rest_endpoints/services/test_ocserv.py +++ /dev/null @@ -1,123 +0,0 @@ -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r") as f: - return json.load(f) - - -############################################################################### - - -@pytest.fixture -def ocserv_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert read_json(datadir / "turned_off.json")["ocserv"]["enable"] == False - return datadir - - -@pytest.fixture -def ocserv_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_on.json")["ocserv"]["enable"] == True - return datadir - - -@pytest.fixture -def ocserv_enable_undefined(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json" - ) - assert "enable" not in read_json(datadir / "enable_undefined.json")["ocserv"] - return datadir - - -@pytest.fixture -def ocserv_undefined(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "ocserv" not in read_json(datadir / "undefined.json") - return datadir - - -############################################################################### - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_unauthorized(client, ocserv_off, endpoint): - response = client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 401 - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_illegal_methods(authorized_client, ocserv_off, endpoint): - response = authorized_client.get(f"/services/ocserv/{endpoint}") - assert response.status_code == 405 - response = authorized_client.put(f"/services/ocserv/{endpoint}") - assert response.status_code == 405 - response = authorized_client.delete(f"/services/ocserv/{endpoint}") - assert response.status_code == 405 - - 
-@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_off(authorized_client, ocserv_off, endpoint, target_file): - response = authorized_client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 200 - assert read_json(ocserv_off / "turned_off.json") == read_json( - ocserv_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_on(authorized_client, ocserv_on, endpoint, target_file): - response = authorized_client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 200 - assert read_json(ocserv_on / "turned_on.json") == read_json(ocserv_on / target_file) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_twice(authorized_client, ocserv_off, endpoint, target_file): - response = authorized_client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 200 - response = authorized_client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 200 - assert read_json(ocserv_off / "turned_off.json") == read_json( - ocserv_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_attribute_deleted( - authorized_client, ocserv_enable_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 200 - assert read_json(ocserv_enable_undefined / "enable_undefined.json") == read_json( - ocserv_enable_undefined / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_ocserv_undefined( - authorized_client, ocserv_undefined, endpoint, target_file -): - 
response = authorized_client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 200 - assert read_json(ocserv_undefined / "undefined.json") == read_json( - ocserv_undefined / target_file - ) diff --git a/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json deleted file mode 100644 index e080110..0000000 --- a/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/turned_off.json b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json deleted file mode 100644 index 1c08123..0000000 --- a/tests/test_rest_endpoints/services/test_ocserv/turned_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": 
"HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": false - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/turned_on.json b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json deleted file mode 100644 index b80ad9e..0000000 --- a/tests/test_rest_endpoints/services/test_ocserv/turned_on.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": 
"BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/undefined.json b/tests/test_rest_endpoints/services/test_ocserv/undefined.json deleted file mode 100644 index 12eb73a..0000000 --- a/tests/test_rest_endpoints/services/test_ocserv/undefined.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file From 113f512565fc0e0efee73c747e0216ba4367ffba Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 24 Nov 2023 11:37:29 +0000 Subject: [PATCH 169/246] test(services): remove redundant pleroma tests --- .../services/test_pleroma.py | 125 ------------------ .../test_pleroma/enable_undefined.json | 56 -------- .../services/test_pleroma/turned_off.json | 57 -------- .../services/test_pleroma/turned_on.json | 57 -------- .../services/test_pleroma/undefined.json | 54 -------- 5 files changed, 349 deletions(-) delete mode 100644 tests/test_rest_endpoints/services/test_pleroma.py delete mode 100644 
tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json delete mode 100644 tests/test_rest_endpoints/services/test_pleroma/turned_off.json delete mode 100644 tests/test_rest_endpoints/services/test_pleroma/turned_on.json delete mode 100644 tests/test_rest_endpoints/services/test_pleroma/undefined.json diff --git a/tests/test_rest_endpoints/services/test_pleroma.py b/tests/test_rest_endpoints/services/test_pleroma.py deleted file mode 100644 index 0d7f149..0000000 --- a/tests/test_rest_endpoints/services/test_pleroma.py +++ /dev/null @@ -1,125 +0,0 @@ -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r") as f: - return json.load(f) - - -############################################################################### - - -@pytest.fixture -def pleroma_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert read_json(datadir / "turned_off.json")["pleroma"]["enable"] == False - return datadir - - -@pytest.fixture -def pleroma_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_on.json")["pleroma"]["enable"] == True - return datadir - - -@pytest.fixture -def pleroma_enable_undefined(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json" - ) - assert "enable" not in read_json(datadir / "enable_undefined.json")["pleroma"] - return datadir - - -@pytest.fixture -def pleroma_undefined(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "pleroma" not in read_json(datadir / "undefined.json") - return datadir - - -############################################################################### - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_unauthorized(client, pleroma_off, endpoint): - response = 
client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 401 - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_illegal_methods(authorized_client, pleroma_off, endpoint): - response = authorized_client.get(f"/services/pleroma/{endpoint}") - assert response.status_code == 405 - response = authorized_client.put(f"/services/pleroma/{endpoint}") - assert response.status_code == 405 - response = authorized_client.delete(f"/services/pleroma/{endpoint}") - assert response.status_code == 405 - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_off(authorized_client, pleroma_off, endpoint, target_file): - response = authorized_client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 200 - assert read_json(pleroma_off / "turned_off.json") == read_json( - pleroma_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_on(authorized_client, pleroma_on, endpoint, target_file): - response = authorized_client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 200 - assert read_json(pleroma_on / "turned_on.json") == read_json( - pleroma_on / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_twice(authorized_client, pleroma_off, endpoint, target_file): - response = authorized_client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 200 - response = authorized_client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 200 - assert read_json(pleroma_off / "turned_off.json") == read_json( - pleroma_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def 
test_on_attribute_deleted( - authorized_client, pleroma_enable_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 200 - assert read_json(pleroma_enable_undefined / "enable_undefined.json") == read_json( - pleroma_enable_undefined / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_pleroma_undefined( - authorized_client, pleroma_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 200 - assert read_json(pleroma_undefined / "undefined.json") == read_json( - pleroma_undefined / target_file - ) diff --git a/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json deleted file mode 100644 index 0903875..0000000 --- a/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": false - }, - "pleroma": { - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - 
"accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/turned_off.json b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json deleted file mode 100644 index 813c01f..0000000 --- a/tests/test_rest_endpoints/services/test_pleroma/turned_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": false - }, - "pleroma": { - "enable": false - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/turned_on.json b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json deleted file mode 100644 index 1c08123..0000000 --- a/tests/test_rest_endpoints/services/test_pleroma/turned_on.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - 
"enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": false - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/undefined.json b/tests/test_rest_endpoints/services/test_pleroma/undefined.json deleted file mode 100644 index 77d8ad2..0000000 --- a/tests/test_rest_endpoints/services/test_pleroma/undefined.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": false - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file From 
80e00740fb889e3b1ea7d24f6e79b6930eafcd0a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 24 Nov 2023 11:54:18 +0000 Subject: [PATCH 170/246] test(services): remove legacy restic test data --- .../services/test_restic/no_values.json | 72 ------------------ .../services/test_restic/some_values.json | 76 ------------------- .../services/test_restic/undefined.json | 70 ----------------- 3 files changed, 218 deletions(-) delete mode 100644 tests/test_rest_endpoints/services/test_restic/no_values.json delete mode 100644 tests/test_rest_endpoints/services/test_restic/some_values.json delete mode 100644 tests/test_rest_endpoints/services/test_restic/undefined.json diff --git a/tests/test_rest_endpoints/services/test_restic/no_values.json b/tests/test_rest_endpoints/services/test_restic/no_values.json deleted file mode 100644 index 3b4a2f5..0000000 --- a/tests/test_rest_endpoints/services/test_restic/no_values.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] - }, - { - "username": "user2", - "hashedPassword": "HASHED_PASSWORD_2", - "sshKeys": [ - ] - }, - { - "username": "user3", - "hashedPassword": 
"HASHED_PASSWORD_3" - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_restic/some_values.json b/tests/test_rest_endpoints/services/test_restic/some_values.json deleted file mode 100644 index c003d10..0000000 --- a/tests/test_rest_endpoints/services/test_restic/some_values.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] - }, - { - "username": "user2", - "hashedPassword": "HASHED_PASSWORD_2", - "sshKeys": [ - ] - }, - { - "username": "user3", - "hashedPassword": "HASHED_PASSWORD_3" - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "BUCKET" - } -} diff --git a/tests/test_rest_endpoints/services/test_restic/undefined.json b/tests/test_rest_endpoints/services/test_restic/undefined.json deleted file mode 100644 index 5bd1220..0000000 --- 
a/tests/test_rest_endpoints/services/test_restic/undefined.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] - }, - { - "username": "user2", - "hashedPassword": "HASHED_PASSWORD_2", - "sshKeys": [ - ] - }, - { - "username": "user3", - "hashedPassword": "HASHED_PASSWORD_3" - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - } -} \ No newline at end of file From 125d221442dc8bf13d962cd0c3b67046da323c46 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 24 Nov 2023 14:16:42 +0000 Subject: [PATCH 171/246] test(services): untie dkim-related service tests from rest --- ...test_services.py => test_services_dkim.py} | 65 ++++++------------- 1 file changed, 19 insertions(+), 46 deletions(-) rename tests/{test_rest_endpoints/services/test_services.py => test_services_dkim.py} (60%) diff --git a/tests/test_rest_endpoints/services/test_services.py b/tests/test_services_dkim.py similarity index 60% rename from tests/test_rest_endpoints/services/test_services.py rename to tests/test_services_dkim.py index 1108e8c..02998c2 100644 --- 
a/tests/test_rest_endpoints/services/test_services.py +++ b/tests/test_services_dkim.py @@ -1,11 +1,12 @@ -import base64 -import json import pytest - -def read_json(file_path): - with open(file_path, "r", encoding="utf-8") as file: - return json.load(file) +from selfprivacy_api.services.service import ServiceStatus +from selfprivacy_api.services.bitwarden import Bitwarden +from selfprivacy_api.services.gitea import Gitea +from selfprivacy_api.services.mailserver import MailServer +from selfprivacy_api.services.nextcloud import Nextcloud +from selfprivacy_api.services.ocserv import Ocserv +from selfprivacy_api.services.pleroma import Pleroma def call_args_asserts(mocked_object): @@ -90,49 +91,21 @@ def mock_broken_service(mocker): ############################################################################### - -def test_unauthorized(client, mock_subproccess_popen): - """Test unauthorized""" - response = client.get("/services/status") - assert response.status_code == 401 - - -def test_illegal_methods(authorized_client, mock_subproccess_popen): - response = authorized_client.post("/services/status") - assert response.status_code == 405 - response = authorized_client.put("/services/status") - assert response.status_code == 405 - response = authorized_client.delete("/services/status") - assert response.status_code == 405 - - def test_dkim_key(authorized_client, mock_subproccess_popen): - response = authorized_client.get("/services/status") - assert response.status_code == 200 - assert response.json() == { - "imap": 0, - "smtp": 0, - "http": 0, - "bitwarden": 0, - "gitea": 0, - "nextcloud": 0, - "ocserv": 0, - "pleroma": 0, - } + assert MailServer.get_status() == ServiceStatus.ACTIVE + assert Bitwarden.get_status() == ServiceStatus.ACTIVE + assert Gitea.get_status() == ServiceStatus.ACTIVE + assert Nextcloud.get_status() == ServiceStatus.ACTIVE + assert Ocserv.get_status() == ServiceStatus.ACTIVE + assert Pleroma.get_status() == ServiceStatus.ACTIVE 
call_args_asserts(mock_subproccess_popen) def test_no_dkim_key(authorized_client, mock_broken_service): - response = authorized_client.get("/services/status") - assert response.status_code == 200 - assert response.json() == { - "imap": 1, - "smtp": 1, - "http": 0, - "bitwarden": 1, - "gitea": 1, - "nextcloud": 1, - "ocserv": 1, - "pleroma": 1, - } + assert MailServer.get_status() == ServiceStatus.FAILED + assert Bitwarden.get_status() == ServiceStatus.FAILED + assert Gitea.get_status() == ServiceStatus.FAILED + assert Nextcloud.get_status() == ServiceStatus.FAILED + assert Ocserv.get_status() == ServiceStatus.FAILED + assert Pleroma.get_status() == ServiceStatus.FAILED call_args_asserts(mock_broken_service) From 980d3622e8acce3e5c7c9a90eeda06f76a1f13d0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 27 Nov 2023 18:12:45 +0000 Subject: [PATCH 172/246] test(services): remove redundant legacy bad-ssh-key test from rest-enfpo --- tests/test_rest_endpoints/services/test_ssh.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index a17bdab..a1a33f8 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -269,13 +269,6 @@ def test_add_existing_root_key(authorized_client, root_and_admin_have_keys): ] -def test_add_invalid_root_key(authorized_client, ssh_on): - response = authorized_client.put( - "/services/ssh/key/send", json={"public_key": "INVALID KEY test@pc"} - ) - assert response.status_code == 400 - - ## /ssh/keys/{user} ###################################################### From 1b520a80935b786201de7382b242d68b9acf159a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 15 Dec 2023 09:46:59 +0000 Subject: [PATCH 173/246] feature(ssh): change ssh settings from graphql --- .../graphql/mutations/system_mutations.py | 37 +++++++++++++++++++ tests/test_graphql/test_ssh.py | 33 +++++++++++++++++ 2 files 
changed, 70 insertions(+) diff --git a/selfprivacy_api/graphql/mutations/system_mutations.py b/selfprivacy_api/graphql/mutations/system_mutations.py index daada17..b0cdae8 100644 --- a/selfprivacy_api/graphql/mutations/system_mutations.py +++ b/selfprivacy_api/graphql/mutations/system_mutations.py @@ -9,6 +9,7 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( ) import selfprivacy_api.actions.system as system_actions +import selfprivacy_api.actions.ssh as ssh_actions @strawberry.type @@ -26,6 +27,22 @@ class AutoUpgradeSettingsMutationReturn(MutationReturnInterface): allowReboot: bool +@strawberry.type +class SSHSettingsMutationReturn(MutationReturnInterface): + """A return type for after changing SSH settings""" + + enable: bool + password_authentication: bool + + +@strawberry.input +class SSHSettingsInput: + """Input type for SSH settings""" + + enable: bool + password_authentication: bool + + @strawberry.input class AutoUpgradeSettingsInput: """Input type for auto upgrade settings""" @@ -76,6 +93,26 @@ class SystemMutations: allowReboot=new_settings.allowReboot, ) + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def change_ssh_settings( + self, settings: SSHSettingsInput + ) -> SSHSettingsMutationReturn: + """Change ssh settings of the server.""" + ssh_actions.set_ssh_settings( + enable=settings.enable, + password_authentication=settings.password_authentication, + ) + + new_settings = ssh_actions.get_ssh_settings() + + return SSHSettingsMutationReturn( + success=True, + message="SSH settings changed", + code=200, + enable=new_settings.enable, + password_authentication=new_settings.passwordAuthentication, + ) + @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_rebuild(self) -> GenericMutationReturn: system_actions.rebuild_system() diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index eabf049..5f16c53 100644 --- a/tests/test_graphql/test_ssh.py +++ 
b/tests/test_graphql/test_ssh.py @@ -4,6 +4,7 @@ import pytest from tests.common import read_json from tests.test_graphql.common import assert_empty +from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations class ProcessMock: @@ -59,6 +60,38 @@ mutation addSshKey($sshInput: SshMutationInput!) { } """ +API_SET_SSH_SETTINGS = """ +mutation enableSsh($sshInput: SSHSettingsInput!) { + system { + changeSshSettings(sshInput: $sshInput) { + success + message + code + enable + password_authentication + } + } +} +""" + + +def test_graphql_change_ssh_settings_unauthorized( + client, some_users, mock_subprocess_popen +): + response = client.post( + "/graphql", + json={ + "query": API_SET_SSH_SETTINGS, + "variables": { + "sshInput": { + "enable": True, + "passwordAuthentication": True, + }, + }, + }, + ) + assert_empty(response) + def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_popen): response = client.post( From 66561308bf94a68c1f746c89a40c88c973569a2c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 15 Dec 2023 10:09:35 +0000 Subject: [PATCH 174/246] test(ssh): add graphql ssh status query test --- tests/test_graphql/test_ssh.py | 35 +++++++++++++++++++++++++++++++--- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 5f16c53..409ecad 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -2,9 +2,11 @@ # pylint: disable=unused-argument import pytest -from tests.common import read_json -from tests.test_graphql.common import assert_empty from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations +from selfprivacy_api.graphql.queries.system import System + +from tests.common import read_json, generate_system_query +from tests.test_graphql.common import assert_empty, get_data class ProcessMock: @@ -68,12 +70,39 @@ mutation enableSsh($sshInput: SSHSettingsInput!) 
{ message code enable - password_authentication + passwordAuthentication } } } """ +API_SSH_SETTINGS_QUERY = """ +settings { + ssh { + enable + passwordAuthentication + } +} +""" + + +def api_ssh_settings(authorized_client): + response = authorized_client.post( + "/graphql", + json={"query": generate_system_query([API_SSH_SETTINGS_QUERY])}, + ) + data = get_data(response) + result = data["system"]["settings"]["ssh"] + assert result is not None + return result + + +def test_graphql_ssh_enabled_by_default(authorized_client, some_users): + # TODO: Should it be enabled by default though if there are no keys anyway? + settings = api_ssh_settings(authorized_client) + assert settings["enable"] is True + assert settings["passwordAuthentication"] is True + def test_graphql_change_ssh_settings_unauthorized( client, some_users, mock_subprocess_popen From f179cff0b4ebd101f8ae61f3c01ac8f791a1e996 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 15 Dec 2023 10:43:07 +0000 Subject: [PATCH 175/246] test(ssh): try disabling ssh --- tests/test_graphql/test_ssh.py | 35 +++++++++++++++++++++++++++++----- 1 file changed, 30 insertions(+), 5 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 409ecad..9b007c0 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -6,7 +6,7 @@ from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations from selfprivacy_api.graphql.queries.system import System from tests.common import read_json, generate_system_query -from tests.test_graphql.common import assert_empty, get_data +from tests.test_graphql.common import assert_empty, assert_ok, get_data class ProcessMock: @@ -63,9 +63,9 @@ mutation addSshKey($sshInput: SshMutationInput!) { """ API_SET_SSH_SETTINGS = """ -mutation enableSsh($sshInput: SSHSettingsInput!) { +mutation enableSsh($settings: SSHSettingsInput!) 
{ system { - changeSshSettings(sshInput: $sshInput) { + changeSshSettings(settings: $settings) { success message code @@ -97,8 +97,26 @@ def api_ssh_settings(authorized_client): return result -def test_graphql_ssh_enabled_by_default(authorized_client, some_users): - # TODO: Should it be enabled by default though if there are no keys anyway? +def api_set_ssh_settings(authorized_client, enable: bool, password_auth: bool): + response = authorized_client.post( + "/graphql", + json={ + "query": API_SET_SSH_SETTINGS, + "variables": { + "settings": { + "enable": enable, + "passwordAuthentication": password_auth, + }, + }, + }, + ) + data = get_data(response) + result = data["system"]["changeSshSettings"] + assert result is not None + return result + + +def test_graphql_ssh_query(authorized_client, some_users): settings = api_ssh_settings(authorized_client) assert settings["enable"] is True assert settings["passwordAuthentication"] is True @@ -122,6 +140,13 @@ def test_graphql_change_ssh_settings_unauthorized( assert_empty(response) +def test_graphql_disable_ssh(authorized_client, some_users, mock_subprocess_popen): + output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False) + assert_ok(output) + assert output["enable"] == False + assert output["passwordAuthentication"] == False + + def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_popen): response = client.post( "/graphql", From 7c382c4779ad57854f068169f69402b9519c8ec2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 15 Dec 2023 10:51:47 +0000 Subject: [PATCH 176/246] test(ssh): flip flop ssh --- tests/test_graphql/test_ssh.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 9b007c0..eefa6d8 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -140,12 +140,29 @@ def test_graphql_change_ssh_settings_unauthorized( 
assert_empty(response) -def test_graphql_disable_ssh(authorized_client, some_users, mock_subprocess_popen): +def test_graphql_disable_enable_ssh( + authorized_client, some_users, mock_subprocess_popen +): output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False) assert_ok(output) assert output["enable"] == False assert output["passwordAuthentication"] == False + output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True) + assert_ok(output) + assert output["enable"] == True + assert output["passwordAuthentication"] == True + + output = api_set_ssh_settings(authorized_client, enable=True, password_auth=False) + assert_ok(output) + assert output["enable"] == True + assert output["passwordAuthentication"] == False + + output = api_set_ssh_settings(authorized_client, enable=False, password_auth=True) + assert_ok(output) + assert output["enable"] == False + assert output["passwordAuthentication"] == True + def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_popen): response = client.post( From 4e730f015a2b453d75db208a22729dfb4150a8c1 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 15 Dec 2023 11:02:31 +0000 Subject: [PATCH 177/246] test(ssh): test that query is in sync --- tests/test_graphql/test_ssh.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index eefa6d8..a0d82ba 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -140,6 +140,11 @@ def test_graphql_change_ssh_settings_unauthorized( assert_empty(response) +def assert_includes(smaller_dict: dict, bigger_dict: dict): + for item in smaller_dict.items(): + assert item in bigger_dict.items() + + def test_graphql_disable_enable_ssh( authorized_client, some_users, mock_subprocess_popen ): @@ -147,21 +152,25 @@ def test_graphql_disable_enable_ssh( assert_ok(output) assert output["enable"] == False assert output["passwordAuthentication"] 
== False + assert_includes(api_ssh_settings(authorized_client), output) output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True) assert_ok(output) assert output["enable"] == True assert output["passwordAuthentication"] == True + assert_includes(api_ssh_settings(authorized_client), output) output = api_set_ssh_settings(authorized_client, enable=True, password_auth=False) assert_ok(output) assert output["enable"] == True assert output["passwordAuthentication"] == False + assert_includes(api_ssh_settings(authorized_client), output) output = api_set_ssh_settings(authorized_client, enable=False, password_auth=True) assert_ok(output) assert output["enable"] == False assert output["passwordAuthentication"] == True + assert_includes(api_ssh_settings(authorized_client), output) def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_popen): From 1bb24b5f932edbd388d89baf49e773e7590e2b23 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 15 Dec 2023 11:09:20 +0000 Subject: [PATCH 178/246] test(ssh): test idempotency of enablement --- tests/test_graphql/test_ssh.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index a0d82ba..7ec51d4 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -173,6 +173,32 @@ def test_graphql_disable_enable_ssh( assert_includes(api_ssh_settings(authorized_client), output) +def test_graphql_disable_twice(authorized_client, some_users, mock_subprocess_popen): + output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False) + assert_ok(output) + assert output["enable"] == False + assert output["passwordAuthentication"] == False + + output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False) + assert_ok(output) + assert output["enable"] == False + assert output["passwordAuthentication"] == False + + +def 
test_graphql_enable_twice(authorized_client, some_users, mock_subprocess_popen): + output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True) + assert_ok(output) + assert output["enable"] == True + assert output["passwordAuthentication"] == True + assert_includes(api_ssh_settings(authorized_client), output) + + output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True) + assert_ok(output) + assert output["enable"] == True + assert output["passwordAuthentication"] == True + assert_includes(api_ssh_settings(authorized_client), output) + + def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_popen): response = client.post( "/graphql", From b644208c29e9fa06b607ce747dda43af5d4de20a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 15 Dec 2023 11:22:20 +0000 Subject: [PATCH 179/246] test(ssh): cleanup --- tests/test_graphql/test_ssh.py | 38 +++++++++++++++++----------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 7ec51d4..c601b28 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -16,7 +16,7 @@ class ProcessMock: self.args = args self.kwargs = kwargs - def communicate(): # pylint: disable=no-method-argument + def communicate(self): # pylint: disable=no-method-argument return (b"NEW_HASHED", None) returncode = 0 @@ -150,52 +150,52 @@ def test_graphql_disable_enable_ssh( ): output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False) assert_ok(output) - assert output["enable"] == False - assert output["passwordAuthentication"] == False + assert output["enable"] is False + assert output["passwordAuthentication"] is False assert_includes(api_ssh_settings(authorized_client), output) output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True) assert_ok(output) - assert output["enable"] == True - assert output["passwordAuthentication"] 
== True + assert output["enable"] is True + assert output["passwordAuthentication"] is True assert_includes(api_ssh_settings(authorized_client), output) output = api_set_ssh_settings(authorized_client, enable=True, password_auth=False) assert_ok(output) - assert output["enable"] == True - assert output["passwordAuthentication"] == False + assert output["enable"] is True + assert output["passwordAuthentication"] is False assert_includes(api_ssh_settings(authorized_client), output) output = api_set_ssh_settings(authorized_client, enable=False, password_auth=True) assert_ok(output) - assert output["enable"] == False - assert output["passwordAuthentication"] == True + assert output["enable"] is False + assert output["passwordAuthentication"] is True assert_includes(api_ssh_settings(authorized_client), output) -def test_graphql_disable_twice(authorized_client, some_users, mock_subprocess_popen): +def test_graphql_disable_twice(authorized_client, some_users): output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False) assert_ok(output) - assert output["enable"] == False - assert output["passwordAuthentication"] == False + assert output["enable"] is False + assert output["passwordAuthentication"] is False output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False) assert_ok(output) - assert output["enable"] == False - assert output["passwordAuthentication"] == False + assert output["enable"] is False + assert output["passwordAuthentication"] is False -def test_graphql_enable_twice(authorized_client, some_users, mock_subprocess_popen): +def test_graphql_enable_twice(authorized_client, some_users): output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True) assert_ok(output) - assert output["enable"] == True - assert output["passwordAuthentication"] == True + assert output["enable"] is True + assert output["passwordAuthentication"] is True assert_includes(api_ssh_settings(authorized_client), output) 
output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True) assert_ok(output) - assert output["enable"] == True - assert output["passwordAuthentication"] == True + assert output["enable"] is True + assert output["passwordAuthentication"] is True assert_includes(api_ssh_settings(authorized_client), output) From e11e73f8725032426a219ca99aa92dae1bbe29e8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 15 Dec 2023 12:48:18 +0000 Subject: [PATCH 180/246] test(ssh): add json storage writing tests --- tests/test_ssh.py | 84 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 tests/test_ssh.py diff --git a/tests/test_ssh.py b/tests/test_ssh.py new file mode 100644 index 0000000..6cb255d --- /dev/null +++ b/tests/test_ssh.py @@ -0,0 +1,84 @@ +import pytest + +from selfprivacy_api.actions.ssh import set_ssh_settings, get_ssh_settings +from selfprivacy_api.utils import WriteUserData, ReadUserData + + +@pytest.fixture(params=[True, False]) +def bool_value(request): + return request.param + + +@pytest.fixture( + params=[ + "normal_populated_json", + "deleted_enabled", + "deleted_auth", + "empty", + "ssh_not_in_json", + ] +) +def possibly_undefined_ssh_settings(generic_userdata, request, bool_value): + with WriteUserData() as data: + data["ssh"] = {"enable": bool_value, "passswordAuthentication": bool_value} + assert get_raw_json_ssh_setting("enable") == bool_value + assert get_raw_json_ssh_setting("passswordAuthentication") == bool_value + + if request.param == "deleted_enabled": + with WriteUserData() as data: + del data["ssh"]["enable"] + + if request.param == "deleted_auth": + with WriteUserData() as data: + del data["ssh"]["passswordAuthentication"] + + if request.param == "empty": + with WriteUserData() as data: + del data["ssh"]["passswordAuthentication"] + del data["ssh"]["enable"] + + if request.param == "ssh_not_in_json": + with WriteUserData() as data: + del data["ssh"] + + 
+@pytest.fixture(params=[True, False, None]) +def ssh_enable_spectrum(request): + return request.param + + +@pytest.fixture(params=[True, False, None]) +def password_auth_spectrum(request): + return request.param + + +def get_raw_json_ssh_setting(setting: str): + with ReadUserData() as data: + return (data.get("ssh") or {}).get(setting) + + +def test_enabling_disabling_writes_json( + possibly_undefined_ssh_settings, ssh_enable_spectrum, password_auth_spectrum +): + + original_enable = get_raw_json_ssh_setting("enable") + original_password_auth = get_raw_json_ssh_setting("passwordAuthentication") + + set_ssh_settings(ssh_enable_spectrum, password_auth_spectrum) + + with ReadUserData() as data: + if ssh_enable_spectrum is None: + assert get_raw_json_ssh_setting("enable") == original_enable + else: + assert get_raw_json_ssh_setting("enable") == ssh_enable_spectrum + + if password_auth_spectrum is None: + assert ( + get_raw_json_ssh_setting("passwordAuthentication") + == original_password_auth + ) + else: + assert ( + get_raw_json_ssh_setting("passwordAuthentication") + == password_auth_spectrum + ) From f35280b76478343bd05e3f33dc63cb69cdc2defd Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 18 Dec 2023 11:21:21 +0000 Subject: [PATCH 181/246] test(ssh): add json storage reading tests --- tests/test_ssh.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/tests/test_ssh.py b/tests/test_ssh.py index 6cb255d..5c3414b 100644 --- a/tests/test_ssh.py +++ b/tests/test_ssh.py @@ -57,6 +57,31 @@ def get_raw_json_ssh_setting(setting: str): return (data.get("ssh") or {}).get(setting) +def test_read_json(possibly_undefined_ssh_settings): + with ReadUserData() as data: + if "ssh" not in data.keys(): + assert get_ssh_settings().enable is not None + assert get_ssh_settings().passwordAuthentication is not None + + # TODO: Is it really a good idea to have password ssh enabled by default? 
+ assert get_ssh_settings().enable is True + assert get_ssh_settings().passwordAuthentication is True + return + + if "enable" not in data["ssh"].keys(): + assert get_ssh_settings().enable is True + else: + assert get_ssh_settings().enable == data["ssh"]["enable"] + + if "passwordAuthentication" not in data["ssh"].keys(): + assert get_ssh_settings().passwordAuthentication is True + else: + assert ( + get_ssh_settings().passwordAuthentication + == data["ssh"]["passwordAuthentication"] + ) + + def test_enabling_disabling_writes_json( possibly_undefined_ssh_settings, ssh_enable_spectrum, password_auth_spectrum ): From 5651dcd94ec73962aaa603cdcbb44a72692fbb7b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 18 Dec 2023 11:49:58 +0000 Subject: [PATCH 182/246] test(ssh): remove rest tests for undefined ssh settings --- .../test_rest_endpoints/services/test_ssh.py | 23 ------------------- 1 file changed, 23 deletions(-) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index a1a33f8..961b277 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -152,18 +152,6 @@ def test_get_current_settings_all_off(authorized_client, all_off): assert response.json() == {"enable": False, "passwordAuthentication": False} -def test_get_current_settings_undefined(authorized_client, undefined_settings): - response = authorized_client.get("/services/ssh") - assert response.status_code == 200 - assert response.json() == {"enable": True, "passwordAuthentication": True} - - -def test_get_current_settings_mostly_undefined(authorized_client, undefined_values): - response = authorized_client.get("/services/ssh") - assert response.status_code == 200 - assert response.json() == {"enable": True, "passwordAuthentication": True} - - ## PUT ON /ssh ###################################################### available_settings = [ @@ -211,17 +199,6 @@ def 
test_set_settings_all_off(authorized_client, all_off, settings): assert data["passwordAuthentication"] == settings["passwordAuthentication"] -@pytest.mark.parametrize("settings", available_settings) -def test_set_settings_undefined(authorized_client, undefined_settings, settings): - response = authorized_client.put("/services/ssh", json=settings) - assert response.status_code == 200 - data = read_json(undefined_settings / "undefined.json")["ssh"] - if "enable" in settings: - assert data["enable"] == settings["enable"] - if "passwordAuthentication" in settings: - assert data["passwordAuthentication"] == settings["passwordAuthentication"] - - ## PUT ON /ssh/key/send ###################################################### From ed4f6bfe327f531a3705456c6435a3b87db47919 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 18 Dec 2023 11:50:24 +0000 Subject: [PATCH 183/246] test(ssh): add test for unauthorized settings getting --- tests/test_graphql/test_ssh.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index c601b28..9f6b6eb 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -86,11 +86,15 @@ settings { """ -def api_ssh_settings(authorized_client): - response = authorized_client.post( +def api_ssh_settings_raw(client): + return client.post( "/graphql", json={"query": generate_system_query([API_SSH_SETTINGS_QUERY])}, ) + + +def api_ssh_settings(authorized_client): + response = api_ssh_settings_raw(authorized_client) data = get_data(response) result = data["system"]["settings"]["ssh"] assert result is not None @@ -122,9 +126,12 @@ def test_graphql_ssh_query(authorized_client, some_users): assert settings["passwordAuthentication"] is True -def test_graphql_change_ssh_settings_unauthorized( - client, some_users, mock_subprocess_popen -): +def test_graphql_get_ssh_settings_unauthorized(client, some_users): + response = 
api_ssh_settings_raw(client) + assert_empty(response) + + +def test_graphql_change_ssh_settings_unauthorized(client, some_users): response = client.post( "/graphql", json={ @@ -148,18 +155,21 @@ def assert_includes(smaller_dict: dict, bigger_dict: dict): def test_graphql_disable_enable_ssh( authorized_client, some_users, mock_subprocess_popen ): + # Off output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False) assert_ok(output) assert output["enable"] is False assert output["passwordAuthentication"] is False assert_includes(api_ssh_settings(authorized_client), output) + # On output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True) assert_ok(output) assert output["enable"] is True assert output["passwordAuthentication"] is True assert_includes(api_ssh_settings(authorized_client), output) + # Criss-Cross output = api_set_ssh_settings(authorized_client, enable=True, password_auth=False) assert_ok(output) assert output["enable"] is True From 6c0d4ab42ab4a37514b7de5f98d050f05843e4ba Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 18 Dec 2023 11:57:21 +0000 Subject: [PATCH 184/246] test(ssh): remove basic unauthorized tests from rest ssh tests --- tests/test_rest_endpoints/services/test_ssh.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index 961b277..1d8343a 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -92,21 +92,6 @@ def some_users(mocker, datadir): return datadir -## TEST 401 ###################################################### - - -@pytest.mark.parametrize("endpoint", ["ssh/enable", "ssh/keys/user"]) -def test_unauthorized(client, ssh_off, endpoint): - response = client.post(f"/services/{endpoint}") - assert response.status_code == 401 - - -@pytest.mark.parametrize("endpoint", ["ssh", "ssh/key/send"]) -def 
test_unauthorized_put(client, ssh_off, endpoint): - response = client.put(f"/services/{endpoint}") - assert response.status_code == 401 - - ## TEST ENABLE ###################################################### From 9822d42dac7b32ae093534029802f9a6c09ba958 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 18 Dec 2023 13:26:47 +0000 Subject: [PATCH 185/246] test(ssh): remove rest enablement tests --- .../test_rest_endpoints/services/test_ssh.py | 24 ------------------- 1 file changed, 24 deletions(-) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index 1d8343a..d09f089 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -92,30 +92,6 @@ def some_users(mocker, datadir): return datadir -## TEST ENABLE ###################################################### - - -def test_legacy_enable(authorized_client, ssh_off): - response = authorized_client.post("/services/ssh/enable") - assert response.status_code == 200 - assert read_json(ssh_off / "turned_off.json") == read_json( - ssh_off / "turned_on.json" - ) - - -def test_legacy_on_undefined(authorized_client, undefined_settings): - response = authorized_client.post("/services/ssh/enable") - assert response.status_code == 200 - data = read_json(undefined_settings / "undefined.json") - assert data["ssh"]["enable"] == True - - -def test_legacy_enable_when_enabled(authorized_client, ssh_on): - response = authorized_client.post("/services/ssh/enable") - assert response.status_code == 200 - assert read_json(ssh_on / "turned_on.json") == read_json(ssh_on / "turned_on.json") - - ## GET ON /ssh ###################################################### From 60c7e9a7e2b846fd8e861ad9d099f336147e039c Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 18 Dec 2023 14:16:30 +0000 Subject: [PATCH 186/246] test(ssh): full ssh enablement-via-gql readwrite testing --- tests/test_graphql/test_ssh.py | 83 
+++++++++++++++++++++++----------- 1 file changed, 56 insertions(+), 27 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 9f6b6eb..a911fb1 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -101,16 +101,13 @@ def api_ssh_settings(authorized_client): return result -def api_set_ssh_settings(authorized_client, enable: bool, password_auth: bool): +def api_set_ssh_settings_dict(authorized_client, dict): response = authorized_client.post( "/graphql", json={ "query": API_SET_SSH_SETTINGS, "variables": { - "settings": { - "enable": enable, - "passwordAuthentication": password_auth, - }, + "settings": dict, }, }, ) @@ -120,6 +117,16 @@ def api_set_ssh_settings(authorized_client, enable: bool, password_auth: bool): return result +def api_set_ssh_settings(authorized_client, enable: bool, password_auth: bool): + return api_set_ssh_settings_dict( + authorized_client, + { + "enable": enable, + "passwordAuthentication": password_auth, + }, + ) + + def test_graphql_ssh_query(authorized_client, some_users): settings = api_ssh_settings(authorized_client) assert settings["enable"] is True @@ -152,35 +159,57 @@ def assert_includes(smaller_dict: dict, bigger_dict: dict): assert item in bigger_dict.items() -def test_graphql_disable_enable_ssh( - authorized_client, some_users, mock_subprocess_popen +available_settings = [ + {"enable": True, "passwordAuthentication": True}, + {"enable": True, "passwordAuthentication": False}, + {"enable": False, "passwordAuthentication": True}, + {"enable": False, "passwordAuthentication": False}, +] + + +original_settings = [ + {"enable": True, "passwordAuthentication": True}, + {"enable": True, "passwordAuthentication": False}, + {"enable": False, "passwordAuthentication": True}, + {"enable": False, "passwordAuthentication": False}, +] + + +@pytest.mark.parametrize("original_settings", original_settings) +@pytest.mark.parametrize("settings", available_settings) +def 
test_graphql_readwrite_ssh_settings( + authorized_client, some_users, settings, original_settings ): - # Off - output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False) - assert_ok(output) - assert output["enable"] is False - assert output["passwordAuthentication"] is False + + # Userdata-related tests like undefined fields are in actions-level tests. + output = api_set_ssh_settings_dict(authorized_client, original_settings) assert_includes(api_ssh_settings(authorized_client), output) - # On - output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True) + output = api_set_ssh_settings_dict(authorized_client, settings) assert_ok(output) - assert output["enable"] is True - assert output["passwordAuthentication"] is True + assert_includes(settings, output) + if "enable" not in settings.keys(): + assert output["enable"] == original_settings["enable"] assert_includes(api_ssh_settings(authorized_client), output) - # Criss-Cross - output = api_set_ssh_settings(authorized_client, enable=True, password_auth=False) - assert_ok(output) - assert output["enable"] is True - assert output["passwordAuthentication"] is False - assert_includes(api_ssh_settings(authorized_client), output) - output = api_set_ssh_settings(authorized_client, enable=False, password_auth=True) - assert_ok(output) - assert output["enable"] is False - assert output["passwordAuthentication"] is True - assert_includes(api_ssh_settings(authorized_client), output) +forbidden_settings = [ + # we include this here so that if the next version makes the fields + # optional, the tests will remind the person that tests are to be extended accordingly + {"enable": True}, + {"passwordAuthentication": True}, +] + + +@pytest.mark.parametrize("original_settings", original_settings) +@pytest.mark.parametrize("settings", forbidden_settings) +def test_graphql_readwrite_ssh_settings_partial( + authorized_client, some_users, settings, original_settings +): + + output = 
api_set_ssh_settings_dict(authorized_client, original_settings) + with pytest.raises(Exception): + output = api_set_ssh_settings_dict(authorized_client, settings) def test_graphql_disable_twice(authorized_client, some_users): From 0b90e3d20f5071c2ec4a073664e1096adf7b2f27 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 18 Dec 2023 14:53:47 +0000 Subject: [PATCH 187/246] test(ssh): remove rest ssh enablement tests --- .../test_rest_endpoints/services/test_ssh.py | 68 ------------------- 1 file changed, 68 deletions(-) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index d09f089..759e5ed 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -92,74 +92,6 @@ def some_users(mocker, datadir): return datadir -## GET ON /ssh ###################################################### - - -def test_get_current_settings_ssh_off(authorized_client, ssh_off): - response = authorized_client.get("/services/ssh") - assert response.status_code == 200 - assert response.json() == {"enable": False, "passwordAuthentication": True} - - -def test_get_current_settings_ssh_on(authorized_client, ssh_on): - response = authorized_client.get("/services/ssh") - assert response.status_code == 200 - assert response.json() == {"enable": True, "passwordAuthentication": True} - - -def test_get_current_settings_all_off(authorized_client, all_off): - response = authorized_client.get("/services/ssh") - assert response.status_code == 200 - assert response.json() == {"enable": False, "passwordAuthentication": False} - - -## PUT ON /ssh ###################################################### - -available_settings = [ - {"enable": True, "passwordAuthentication": True}, - {"enable": True, "passwordAuthentication": False}, - {"enable": False, "passwordAuthentication": True}, - {"enable": False, "passwordAuthentication": False}, - {"enable": True}, - {"enable": False}, - 
{"passwordAuthentication": True}, - {"passwordAuthentication": False}, -] - - -@pytest.mark.parametrize("settings", available_settings) -def test_set_settings_ssh_off(authorized_client, ssh_off, settings): - response = authorized_client.put("/services/ssh", json=settings) - assert response.status_code == 200 - data = read_json(ssh_off / "turned_off.json")["ssh"] - if "enable" in settings: - assert data["enable"] == settings["enable"] - if "passwordAuthentication" in settings: - assert data["passwordAuthentication"] == settings["passwordAuthentication"] - - -@pytest.mark.parametrize("settings", available_settings) -def test_set_settings_ssh_on(authorized_client, ssh_on, settings): - response = authorized_client.put("/services/ssh", json=settings) - assert response.status_code == 200 - data = read_json(ssh_on / "turned_on.json")["ssh"] - if "enable" in settings: - assert data["enable"] == settings["enable"] - if "passwordAuthentication" in settings: - assert data["passwordAuthentication"] == settings["passwordAuthentication"] - - -@pytest.mark.parametrize("settings", available_settings) -def test_set_settings_all_off(authorized_client, all_off, settings): - response = authorized_client.put("/services/ssh", json=settings) - assert response.status_code == 200 - data = read_json(all_off / "all_off.json")["ssh"] - if "enable" in settings: - assert data["enable"] == settings["enable"] - if "passwordAuthentication" in settings: - assert data["passwordAuthentication"] == settings["passwordAuthentication"] - - ## PUT ON /ssh/key/send ###################################################### From a5ab0df1614a64e32615454bcab53411d6448307 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Dec 2023 10:26:54 +0000 Subject: [PATCH 188/246] test(ssh): add rootkey json tests --- tests/test_ssh.py | 82 ++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 81 insertions(+), 1 deletion(-) diff --git a/tests/test_ssh.py b/tests/test_ssh.py index 5c3414b..f291dd4 100644 --- 
a/tests/test_ssh.py +++ b/tests/test_ssh.py @@ -1,6 +1,12 @@ import pytest -from selfprivacy_api.actions.ssh import set_ssh_settings, get_ssh_settings +from selfprivacy_api.actions.ssh import ( + set_ssh_settings, + get_ssh_settings, + create_ssh_key, + remove_ssh_key, +) +from selfprivacy_api.actions.users import get_users from selfprivacy_api.utils import WriteUserData, ReadUserData @@ -107,3 +113,77 @@ def test_enabling_disabling_writes_json( get_raw_json_ssh_setting("passwordAuthentication") == password_auth_spectrum ) + + +def test_read_root_keys_from_json(generic_userdata): + assert get_ssh_settings().rootKeys == ["ssh-ed25519 KEY test@pc"] + new_keys = ["ssh-ed25519 KEY test@pc", "ssh-ed25519 KEY2 test@pc"] + + with WriteUserData() as data: + data["ssh"]["rootKeys"] = new_keys + + assert get_ssh_settings().rootKeys == new_keys + + with WriteUserData() as data: + del data["ssh"]["rootKeys"] + + assert get_ssh_settings().rootKeys == [] + + with WriteUserData() as data: + del data["ssh"] + + assert get_ssh_settings().rootKeys == [] + + +def test_removing_root_key_writes_json(generic_userdata): + # generic userdata has a a single root key + rootkeys = get_ssh_settings().rootKeys + assert len(rootkeys) == 1 + key1 = rootkeys[0] + key2 = "ssh-rsa MYSUPERKEY root@pc" + + create_ssh_key("root", key2) + rootkeys = get_ssh_settings().rootKeys + assert len(rootkeys) == 2 + + remove_ssh_key("root", key2) + with ReadUserData() as data: + assert "ssh" in data + assert "rootKeys" in data["ssh"] + assert data["ssh"]["rootKeys"] == [key1] + + remove_ssh_key("root", key1) + with ReadUserData() as data: + assert "ssh" in data + assert "rootKeys" in data["ssh"] + assert data["ssh"]["rootKeys"] == [] + + +def test_adding_root_key_writes_json(generic_userdata): + with WriteUserData() as data: + del data["ssh"] + key1 = "ssh-ed25519 KEY test@pc" + key2 = "ssh-ed25519 KEY2 test@pc" + create_ssh_key("root", key1) + + with ReadUserData() as data: + assert "ssh" in data + assert 
"rootKeys" in data["ssh"] + assert data["ssh"]["rootKeys"] == [key1] + + with WriteUserData() as data: + del data["ssh"]["rootKeys"] + create_ssh_key("root", key1) + + with ReadUserData() as data: + assert "ssh" in data + assert "rootKeys" in data["ssh"] + assert data["ssh"]["rootKeys"] == [key1] + + create_ssh_key("root", key2) + + with ReadUserData() as data: + assert "ssh" in data + assert "rootKeys" in data["ssh"] + # order is irrelevant + assert set(data["ssh"]["rootKeys"]) == set([key1, key2]) From 25d2537208cefb1aeed133f5d007baeb39e48687 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Dec 2023 10:32:02 +0000 Subject: [PATCH 189/246] test(ssh): add docstring with scope to tests/test_ssh --- tests/test_ssh.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/test_ssh.py b/tests/test_ssh.py index f291dd4..739d321 100644 --- a/tests/test_ssh.py +++ b/tests/test_ssh.py @@ -1,3 +1,8 @@ +""" +Action-level tests of ssh +(For API-independent logic incl. connection to persistent storage) +""" + import pytest from selfprivacy_api.actions.ssh import ( From a2065b87b76f378ff83fdabd505ec2a6fe7691b5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Dec 2023 11:36:58 +0000 Subject: [PATCH 190/246] test(ssh): delete undefined root keys --- .../test_rest_endpoints/services/test_ssh.py | 15 ------------ tests/test_ssh.py | 24 +++++++++++++++++++ 2 files changed, 24 insertions(+), 15 deletions(-) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index 759e5ed..cd7ed86 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -105,15 +105,6 @@ def test_add_root_key(authorized_client, ssh_on): ] -def test_add_root_key_on_undefined(authorized_client, undefined_settings): - response = authorized_client.put( - "/services/ssh/key/send", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 201 - data = 
read_json(undefined_settings / "undefined.json") - assert data["ssh"]["rootKeys"] == ["ssh-rsa KEY test@pc"] - - def test_add_root_key_one_more(authorized_client, root_and_admin_have_keys): response = authorized_client.put( "/services/ssh/key/send", json={"public_key": "ssh-rsa KEY test@pc"} @@ -154,12 +145,6 @@ def test_get_root_key_when_none(authorized_client, ssh_on): assert response.json() == [] -def test_get_root_key_on_undefined(authorized_client, undefined_settings): - response = authorized_client.get("/services/ssh/keys/root") - assert response.status_code == 200 - assert response.json() == [] - - def test_delete_root_key(authorized_client, root_and_admin_have_keys): response = authorized_client.delete( "/services/ssh/keys/root", json={"public_key": "ssh-ed25519 KEY test@pc"} diff --git a/tests/test_ssh.py b/tests/test_ssh.py index 739d321..ec8b4b2 100644 --- a/tests/test_ssh.py +++ b/tests/test_ssh.py @@ -10,6 +10,7 @@ from selfprivacy_api.actions.ssh import ( get_ssh_settings, create_ssh_key, remove_ssh_key, + KeyNotFound, ) from selfprivacy_api.actions.users import get_users from selfprivacy_api.utils import WriteUserData, ReadUserData @@ -164,6 +165,29 @@ def test_removing_root_key_writes_json(generic_userdata): assert data["ssh"]["rootKeys"] == [] +def test_remove_root_key_on_undefined(generic_userdata): + # generic userdata has a a single root key + rootkeys = get_ssh_settings().rootKeys + assert len(rootkeys) == 1 + key1 = rootkeys[0] + + with WriteUserData() as data: + del data["ssh"]["rootKeys"] + + with pytest.raises(KeyNotFound): + remove_ssh_key("root", key1) + rootkeys = get_ssh_settings().rootKeys + assert len(rootkeys) == 0 + + with WriteUserData() as data: + del data["ssh"] + + with pytest.raises(KeyNotFound): + remove_ssh_key("root", key1) + rootkeys = get_ssh_settings().rootKeys + assert len(rootkeys) == 0 + + def test_adding_root_key_writes_json(generic_userdata): with WriteUserData() as data: del data["ssh"] From 
ee854aad1a975ba3b8e99451c564ab9f91b7b0e6 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Dec 2023 11:42:29 +0000 Subject: [PATCH 191/246] test(ssh): delete rest test of undefined root key deletion --- tests/test_rest_endpoints/services/test_ssh.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index cd7ed86..e9e668a 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -174,14 +174,6 @@ def test_delete_root_nonexistent_key(authorized_client, root_and_admin_have_keys ] -def test_delete_root_key_on_undefined(authorized_client, undefined_settings): - response = authorized_client.delete( - "/services/ssh/keys/root", json={"public_key": "ssh-ed25519 KEY test@pc"} - ) - assert response.status_code == 404 - assert "ssh" not in read_json(undefined_settings / "undefined.json") - - def test_get_admin_key(authorized_client, root_and_admin_have_keys): response = authorized_client.get("/services/ssh/keys/tester") assert response.status_code == 200 From 90c0c34a8d42302588a590f861462c883dcf3a83 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Dec 2023 12:08:15 +0000 Subject: [PATCH 192/246] test(ssh): add root key when none --- tests/test_graphql/test_ssh.py | 13 +++++++++++-- tests/test_rest_endpoints/services/test_ssh.py | 10 ---------- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index a911fb1..effc3a7 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -5,6 +5,9 @@ import pytest from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations from selfprivacy_api.graphql.queries.system import System +# only allowed in fixtures +from selfprivacy_api.actions.ssh import remove_ssh_key, get_ssh_settings + from tests.common import read_json, generate_system_query from 
tests.test_graphql.common import assert_empty, assert_ok, get_data @@ -43,6 +46,13 @@ def some_users(mocker, datadir): return datadir +@pytest.fixture +def no_rootkeys(generic_userdata): + for rootkey in get_ssh_settings().rootKeys: + remove_ssh_key("root", rootkey) + assert get_ssh_settings().rootKeys == [] + + # TESTS ######################################################## @@ -281,7 +291,7 @@ def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_pope ] -def test_graphql_add_root_ssh_key(authorized_client, some_users, mock_subprocess_popen): +def test_graphql_add_root_ssh_key(authorized_client, no_rootkeys): response = authorized_client.post( "/graphql", json={ @@ -303,7 +313,6 @@ def test_graphql_add_root_ssh_key(authorized_client, some_users, mock_subprocess assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "root" assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ - "ssh-ed25519 KEY test@pc", "ssh-rsa KEY test_key@pc", ] diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index e9e668a..dfcce57 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,16 +95,6 @@ def some_users(mocker, datadir): ## PUT ON /ssh/key/send ###################################################### -def test_add_root_key(authorized_client, ssh_on): - response = authorized_client.put( - "/services/ssh/key/send", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 201 - assert read_json(ssh_on / "turned_on.json")["ssh"]["rootKeys"] == [ - "ssh-rsa KEY test@pc", - ] - - def test_add_root_key_one_more(authorized_client, root_and_admin_have_keys): response = authorized_client.put( "/services/ssh/key/send", json={"public_key": "ssh-rsa KEY test@pc"} From e1db00e509f41f06df50d85f2f494c1e08b486d9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Dec 2023 12:41:29 +0000 
Subject: [PATCH 193/246] test(ssh): add one more root key --- tests/test_graphql/test_ssh.py | 91 +++++++++++++++++++++++++++------- 1 file changed, 72 insertions(+), 19 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index effc3a7..766b059 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -84,6 +84,7 @@ mutation enableSsh($settings: SSHSettingsInput!) { } } } + """ API_SSH_SETTINGS_QUERY = """ @@ -96,6 +97,15 @@ settings { """ +API_ROOTKEYS_QUERY = """ +settings { + ssh { + rootSshKeys + } +} +""" + + def api_ssh_settings_raw(client): return client.post( "/graphql", @@ -103,6 +113,40 @@ def api_ssh_settings_raw(client): ) +def api_rootkeys_raw(client): + return client.post( + "/graphql", + json={"query": generate_system_query([API_ROOTKEYS_QUERY])}, + ) + + +def api_add_ssh_key(authorized_client, user: str, key: str): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": user, + "sshKey": key, + }, + }, + }, + ) + data = get_data(response) + result = data["users"]["addSshKey"] + assert result is not None + return result + + +def api_rootkeys(authorized_client): + response = api_rootkeys_raw(authorized_client) + data = get_data(response) + result = data["system"]["settings"]["ssh"]["rootSshKeys"] + assert result is not None + return result + + def api_ssh_settings(authorized_client): response = api_ssh_settings_raw(authorized_client) data = get_data(response) @@ -248,6 +292,9 @@ def test_graphql_enable_twice(authorized_client, some_users): assert_includes(api_ssh_settings(authorized_client), output) +############## KEYS + + def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_popen): response = client.post( "/graphql", @@ -292,30 +339,36 @@ def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_pope def test_graphql_add_root_ssh_key(authorized_client, 
no_rootkeys): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "root", - "sshKey": "ssh-rsa KEY test_key@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None + output = api_add_ssh_key(authorized_client, "root", "ssh-rsa KEY test_key@pc") - assert response.json()["data"]["users"]["addSshKey"]["code"] == 201 - assert response.json()["data"]["users"]["addSshKey"]["message"] is not None - assert response.json()["data"]["users"]["addSshKey"]["success"] is True + assert output["code"] == 201 + assert output["message"] is not None + assert output["success"] is True - assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "root" - assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ + assert output["user"]["username"] == "root" + assert output["user"]["sshKeys"] == ["ssh-rsa KEY test_key@pc"] + assert api_rootkeys(authorized_client) == ["ssh-rsa KEY test_key@pc"] + + +def test_graphql_add_root_ssh_key_one_more(authorized_client, no_rootkeys): + output = api_add_ssh_key(authorized_client, "root", "ssh-rsa KEY test_key@pc") + assert output["user"]["sshKeys"] == ["ssh-rsa KEY test_key@pc"] + + output = api_add_ssh_key(authorized_client, "root", "ssh-rsa KEY2 test_key@pc") + assert output["code"] == 201 + assert output["message"] is not None + assert output["success"] is True + + assert output["user"]["username"] == "root" + + expected_keys = [ "ssh-rsa KEY test_key@pc", + "ssh-rsa KEY2 test_key@pc", ] + assert output["user"]["sshKeys"] == expected_keys + assert api_rootkeys(authorized_client) == expected_keys + def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.post( From 4b51f42e1b3a3035d7f762e5253b88ee1ce35dbb Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Dec 2023 12:50:01 +0000 Subject: [PATCH 
194/246] test(ssh): remove corresponding rest test --- tests/test_rest_endpoints/services/test_ssh.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index dfcce57..5045149 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -94,20 +94,6 @@ def some_users(mocker, datadir): ## PUT ON /ssh/key/send ###################################################### - -def test_add_root_key_one_more(authorized_client, root_and_admin_have_keys): - response = authorized_client.put( - "/services/ssh/key/send", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 201 - assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][ - "rootKeys" - ] == [ - "ssh-ed25519 KEY test@pc", - "ssh-rsa KEY test@pc", - ] - - def test_add_existing_root_key(authorized_client, root_and_admin_have_keys): response = authorized_client.put( "/services/ssh/key/send", json={"public_key": "ssh-ed25519 KEY test@pc"} From 641959a083e981870d792dee63a07f7427e83a0d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 20 Dec 2023 13:09:43 +0000 Subject: [PATCH 195/246] test(ssh): adding same key --- tests/test_graphql/test_ssh.py | 9 ++++++++- tests/test_rest_endpoints/services/test_ssh.py | 14 -------------- 2 files changed, 8 insertions(+), 15 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 766b059..dc436cb 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -9,7 +9,7 @@ from selfprivacy_api.graphql.queries.system import System from selfprivacy_api.actions.ssh import remove_ssh_key, get_ssh_settings from tests.common import read_json, generate_system_query -from tests.test_graphql.common import assert_empty, assert_ok, get_data +from tests.test_graphql.common import assert_empty, assert_ok, get_data, 
assert_errorcode class ProcessMock: @@ -369,6 +369,13 @@ def test_graphql_add_root_ssh_key_one_more(authorized_client, no_rootkeys): assert output["user"]["sshKeys"] == expected_keys assert api_rootkeys(authorized_client) == expected_keys +def test_graphql_add_root_ssh_key_same(authorized_client, no_rootkeys): + key = "ssh-rsa KEY test_key@pc" + output = api_add_ssh_key(authorized_client, "root", key) + assert output["user"]["sshKeys"] == [key] + + output = api_add_ssh_key(authorized_client, "root", key) + assert_errorcode(output, 409) def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.post( diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index 5045149..56a4020 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -92,20 +92,6 @@ def some_users(mocker, datadir): return datadir -## PUT ON /ssh/key/send ###################################################### - -def test_add_existing_root_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.put( - "/services/ssh/key/send", json={"public_key": "ssh-ed25519 KEY test@pc"} - ) - assert response.status_code == 409 - assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][ - "rootKeys" - ] == [ - "ssh-ed25519 KEY test@pc", - ] - - ## /ssh/keys/{user} ###################################################### From 7f1fcd66e35d9f572feabec707ede5a64bbc73f2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Thu, 21 Dec 2023 08:13:08 +0000 Subject: [PATCH 196/246] test(ssh): get root key --- tests/test_graphql/test_ssh.py | 13 ++++++++++++- tests/test_rest_endpoints/services/test_ssh.py | 6 ------ 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index dc436cb..e1fe80f 100644 --- a/tests/test_graphql/test_ssh.py +++ 
b/tests/test_graphql/test_ssh.py @@ -9,7 +9,12 @@ from selfprivacy_api.graphql.queries.system import System from selfprivacy_api.actions.ssh import remove_ssh_key, get_ssh_settings from tests.common import read_json, generate_system_query -from tests.test_graphql.common import assert_empty, assert_ok, get_data, assert_errorcode +from tests.test_graphql.common import ( + assert_empty, + assert_ok, + get_data, + assert_errorcode, +) class ProcessMock: @@ -338,6 +343,10 @@ def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_pope ] +def test_graphql_get_root_key(authorized_client, some_users): + assert api_rootkeys(authorized_client) == ["ssh-ed25519 KEY test@pc"] + + def test_graphql_add_root_ssh_key(authorized_client, no_rootkeys): output = api_add_ssh_key(authorized_client, "root", "ssh-rsa KEY test_key@pc") @@ -369,6 +378,7 @@ def test_graphql_add_root_ssh_key_one_more(authorized_client, no_rootkeys): assert output["user"]["sshKeys"] == expected_keys assert api_rootkeys(authorized_client) == expected_keys + def test_graphql_add_root_ssh_key_same(authorized_client, no_rootkeys): key = "ssh-rsa KEY test_key@pc" output = api_add_ssh_key(authorized_client, "root", key) @@ -377,6 +387,7 @@ def test_graphql_add_root_ssh_key_same(authorized_client, no_rootkeys): output = api_add_ssh_key(authorized_client, "root", key) assert_errorcode(output, 409) + def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.post( "/graphql", diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index 56a4020..f77e71f 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,12 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -def test_get_root_key(authorized_client, root_and_admin_have_keys): - response = 
authorized_client.get("/services/ssh/keys/root") - assert response.status_code == 200 - assert response.json() == ["ssh-ed25519 KEY test@pc"] - - def test_get_root_key_when_none(authorized_client, ssh_on): response = authorized_client.get("/services/ssh/keys/root") assert response.status_code == 200 From cf2935938d7163265bc4a11790b1f6805a8523aa Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Thu, 21 Dec 2023 08:24:17 +0000 Subject: [PATCH 197/246] test(ssh): get root key when none --- tests/test_graphql/test_ssh.py | 4 ++++ tests/test_rest_endpoints/services/test_ssh.py | 6 ------ 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index e1fe80f..752f808 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -347,6 +347,10 @@ def test_graphql_get_root_key(authorized_client, some_users): assert api_rootkeys(authorized_client) == ["ssh-ed25519 KEY test@pc"] +def test_graphql_get_root_key_when_none(authorized_client, no_rootkeys): + assert api_rootkeys(authorized_client) == [] + + def test_graphql_add_root_ssh_key(authorized_client, no_rootkeys): output = api_add_ssh_key(authorized_client, "root", "ssh-rsa KEY test_key@pc") diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index f77e71f..973adb6 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,12 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -def test_get_root_key_when_none(authorized_client, ssh_on): - response = authorized_client.get("/services/ssh/keys/root") - assert response.status_code == 200 - assert response.json() == [] - - def test_delete_root_key(authorized_client, root_and_admin_have_keys): response = authorized_client.delete( "/services/ssh/keys/root", json={"public_key": "ssh-ed25519 KEY test@pc"} From 
8fc7796da01802cf6d8f9a17f92c8413b4d76e30 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Thu, 21 Dec 2023 08:48:29 +0000 Subject: [PATCH 198/246] test(ssh): remove root key --- tests/test_graphql/test_ssh.py | 4 +--- tests/test_rest_endpoints/services/test_ssh.py | 17 ----------------- 2 files changed, 1 insertion(+), 20 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 752f808..b16bf4c 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -522,9 +522,7 @@ def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_p assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] -def test_graphql_remove_root_ssh_key( - authorized_client, some_users, mock_subprocess_popen -): +def test_graphql_remove_root_ssh_key(authorized_client, some_users): response = authorized_client.post( "/graphql", json={ diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index 973adb6..9eaaf17 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,23 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -def test_delete_root_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.delete( - "/services/ssh/keys/root", json={"public_key": "ssh-ed25519 KEY test@pc"} - ) - assert response.status_code == 200 - assert ( - "rootKeys" - not in read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[ - "ssh" - ] - or read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][ - "rootKeys" - ] - == [] - ) - - def test_delete_root_nonexistent_key(authorized_client, root_and_admin_have_keys): response = authorized_client.delete( "/services/ssh/keys/root", json={"public_key": "ssh-rsa KEY test@pc"} From e7075546c5195386d74426dcf30ebb76150c6b18 Mon Sep 17 
00:00:00 2001 From: Houkime <> Date: Thu, 21 Dec 2023 09:04:27 +0000 Subject: [PATCH 199/246] test(ssh): remove root key nonexistent --- tests/test_graphql/test_ssh.py | 46 ++++++++++++------- .../test_rest_endpoints/services/test_ssh.py | 12 ----- 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index b16bf4c..cf71c78 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -144,6 +144,25 @@ def api_add_ssh_key(authorized_client, user: str, key: str): return result +def api_remove_ssh_key(authorized_client, user: str, key: str): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": user, + "sshKey": key, + }, + }, + }, + ) + data = get_data(response) + result = data["users"]["removeSshKey"] + assert result is not None + return result + + def api_rootkeys(authorized_client): response = api_rootkeys_raw(authorized_client) data = get_data(response) @@ -579,24 +598,17 @@ def test_graphql_remove_main_ssh_key( def test_graphql_remove_nonexistent_ssh_key( authorized_client, some_users, mock_subprocess_popen ): - response = authorized_client.post( - "/graphql", - json={ - "query": API_REMOVE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "user1", - "sshKey": "ssh-rsa KEY test_key@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None + output = api_remove_ssh_key(authorized_client, "user1", "ssh-rsa KEY test_key@pc") + assert_errorcode(output, 404) - assert response.json()["data"]["users"]["removeSshKey"]["code"] == 404 - assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["users"]["removeSshKey"]["success"] is False + +def test_graphql_remove_nonexistent_root_key( + authorized_client, some_users, mock_subprocess_popen +): + output = 
api_remove_ssh_key( + authorized_client, "root", "ssh-rsa gone in a puff of logic" + ) + assert_errorcode(output, 404) def test_graphql_remove_ssh_key_nonexistent_user( diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index 9eaaf17..3ede686 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,18 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -def test_delete_root_nonexistent_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.delete( - "/services/ssh/keys/root", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 404 - assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][ - "rootKeys" - ] == [ - "ssh-ed25519 KEY test@pc", - ] - - def test_get_admin_key(authorized_client, root_and_admin_have_keys): response = authorized_client.get("/services/ssh/keys/tester") assert response.status_code == 200 From ac41cc00ced4ddf016ee88ffef9607e73dc65963 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Thu, 21 Dec 2023 10:08:34 +0000 Subject: [PATCH 200/246] test(ssh): admin keys getting --- tests/test_graphql/test_ssh.py | 49 +++++++++++++++++-- .../test_rest_endpoints/services/test_ssh.py | 12 ----- 2 files changed, 46 insertions(+), 15 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index cf71c78..2d83eea 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -1,20 +1,23 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument import pytest +from typing import Optional from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations from selfprivacy_api.graphql.queries.system import System -# only allowed in fixtures +# only allowed in fixtures and utils from selfprivacy_api.actions.ssh import 
remove_ssh_key, get_ssh_settings +from selfprivacy_api.actions.users import get_users, UserDataUserOrigin -from tests.common import read_json, generate_system_query +from tests.common import read_json, generate_system_query, generate_users_query from tests.test_graphql.common import ( assert_empty, assert_ok, get_data, assert_errorcode, ) +from tests.test_graphql.test_users import API_USERS_INFO class ProcessMock: @@ -58,6 +61,38 @@ def no_rootkeys(generic_userdata): assert get_ssh_settings().rootKeys == [] +@pytest.fixture +def no_admin_key(generic_userdata, authorized_client): + admin_keys = api_get_user_keys(authorized_client, admin_name()) + + for admin_key in admin_keys: + remove_ssh_key(admin_name(), admin_key) + + assert api_get_user_keys(authorized_client, admin_name()) == [] + + +def admin_name() -> Optional[str]: + users = get_users() + for user in users: + if user.origin == UserDataUserOrigin.PRIMARY: + return user.username + return None + + +def api_get_user_keys(authorized_client, user: str): + response = authorized_client.post( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + data = get_data(response)["users"]["allUsers"] + for _user in data: + if _user["username"] == user: + return _user["sshKeys"] + return None + + # TESTS ######################################################## @@ -370,6 +405,13 @@ def test_graphql_get_root_key_when_none(authorized_client, no_rootkeys): assert api_rootkeys(authorized_client) == [] +# Getting admin keys when they are present is tested in test_users.py + + +def test_get_admin_key_when_none(authorized_client, no_admin_key): + assert api_get_user_keys(authorized_client, admin_name()) == [] + + def test_graphql_add_root_ssh_key(authorized_client, no_rootkeys): output = api_add_ssh_key(authorized_client, "root", "ssh-rsa KEY test_key@pc") @@ -411,7 +453,7 @@ def test_graphql_add_root_ssh_key_same(authorized_client, no_rootkeys): assert_errorcode(output, 409) -def 
test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess_popen): +def test_graphql_add_admin_key(authorized_client, some_users): response = authorized_client.post( "/graphql", json={ @@ -438,6 +480,7 @@ def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess ] +# TODO: multiplex for root and admin def test_graphql_add_bad_ssh_key(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.post( "/graphql", diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index 3ede686..b9817c6 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,18 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -def test_get_admin_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.get("/services/ssh/keys/tester") - assert response.status_code == 200 - assert response.json() == ["ssh-rsa KEY test@pc"] - - -def test_get_admin_key_when_none(authorized_client, ssh_on): - response = authorized_client.get("/services/ssh/keys/tester") - assert response.status_code == 200 - assert response.json() == [] - - def test_delete_admin_key(authorized_client, root_and_admin_have_keys): response = authorized_client.delete( "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} From f24aba8abb3c7bbc97edfa16903eb7ddae59ca00 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Thu, 21 Dec 2023 11:40:08 +0000 Subject: [PATCH 201/246] test(ssh): admin keys deleting --- tests/test_graphql/test_ssh.py | 41 ++++--------------- .../test_rest_endpoints/services/test_ssh.py | 21 ---------- 2 files changed, 9 insertions(+), 53 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 2d83eea..ee95cbe 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -93,9 
+93,6 @@ def api_get_user_keys(authorized_client, user: str): return None -# TESTS ######################################################## - - API_CREATE_SSH_KEY_MUTATION = """ mutation addSshKey($sshInput: SshMutationInput!) { users { @@ -240,6 +237,9 @@ def api_set_ssh_settings(authorized_client, enable: bool, password_auth: bool): ) +# TESTS ######################################################## + + def test_graphql_ssh_query(authorized_client, some_users): settings = api_ssh_settings(authorized_client) assert settings["enable"] is True @@ -638,40 +638,17 @@ def test_graphql_remove_main_ssh_key( assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] -def test_graphql_remove_nonexistent_ssh_key( - authorized_client, some_users, mock_subprocess_popen -): - output = api_remove_ssh_key(authorized_client, "user1", "ssh-rsa KEY test_key@pc") - assert_errorcode(output, 404) +key_users = ["root", "tester", "user1"] -def test_graphql_remove_nonexistent_root_key( - authorized_client, some_users, mock_subprocess_popen -): - output = api_remove_ssh_key( - authorized_client, "root", "ssh-rsa gone in a puff of logic" - ) +@pytest.mark.parametrize("user", key_users) +def test_graphql_remove_nonexistent_ssh_key(authorized_client, some_users, user): + output = api_remove_ssh_key(authorized_client, user, "ssh-rsa nonexistent") assert_errorcode(output, 404) def test_graphql_remove_ssh_key_nonexistent_user( authorized_client, some_users, mock_subprocess_popen ): - response = authorized_client.post( - "/graphql", - json={ - "query": API_REMOVE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "user666", - "sshKey": "ssh-rsa KEY test_key@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["removeSshKey"]["code"] == 404 - assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None - assert 
response.json()["data"]["users"]["removeSshKey"]["success"] is False + output = api_remove_ssh_key(authorized_client, "user666", "ssh-rsa KEY test_key@pc") + assert_errorcode(output, 404) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index b9817c6..03dadb3 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,27 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -def test_delete_admin_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.delete( - "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 200 - assert ( - read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["sshKeys"] - == [] - ) - - -def test_delete_nonexistent_admin_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.delete( - "/services/ssh/keys/tester", json={"public_key": "ssh-rsa NO KEY test@pc"} - ) - assert response.status_code == 404 - assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[ - "sshKeys" - ] == ["ssh-rsa KEY test@pc"] - - def test_delete_admin_key_on_undefined(authorized_client, undefined_settings): response = authorized_client.delete( "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} From b1eec1e37bcdaf0e25f22334ba4a7926d8320cb2 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Thu, 21 Dec 2023 13:05:06 +0000 Subject: [PATCH 202/246] test(ssh): admin keys json storage tests --- .../test_rest_endpoints/services/test_ssh.py | 8 -- tests/test_ssh.py | 100 +++++++++++++++++- 2 files changed, 99 insertions(+), 9 deletions(-) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index 03dadb3..10f7752 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ 
b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,14 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -def test_delete_admin_key_on_undefined(authorized_client, undefined_settings): - response = authorized_client.delete( - "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 404 - assert "sshKeys" not in read_json(undefined_settings / "undefined.json") - - def test_add_admin_key(authorized_client, ssh_on): response = authorized_client.post( "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} diff --git a/tests/test_ssh.py b/tests/test_ssh.py index ec8b4b2..d333eef 100644 --- a/tests/test_ssh.py +++ b/tests/test_ssh.py @@ -4,6 +4,7 @@ Action-level tests of ssh """ import pytest +from typing import Optional from selfprivacy_api.actions.ssh import ( set_ssh_settings, @@ -12,7 +13,11 @@ from selfprivacy_api.actions.ssh import ( remove_ssh_key, KeyNotFound, ) -from selfprivacy_api.actions.users import get_users +from selfprivacy_api.actions.users import ( + get_users, + get_user_by_username, + UserDataUserOrigin, +) from selfprivacy_api.utils import WriteUserData, ReadUserData @@ -64,6 +69,14 @@ def password_auth_spectrum(request): return request.param +def admin_name() -> Optional[str]: + users = get_users() + for user in users: + if user.origin == UserDataUserOrigin.PRIMARY: + return user.username + return None + + def get_raw_json_ssh_setting(setting: str): with ReadUserData() as data: return (data.get("ssh") or {}).get(setting) @@ -121,6 +134,9 @@ def test_enabling_disabling_writes_json( ) +############### ROOTKEYS + + def test_read_root_keys_from_json(generic_userdata): assert get_ssh_settings().rootKeys == ["ssh-ed25519 KEY test@pc"] new_keys = ["ssh-ed25519 KEY test@pc", "ssh-ed25519 KEY2 test@pc"] @@ -216,3 +232,85 @@ def test_adding_root_key_writes_json(generic_userdata): assert "rootKeys" in data["ssh"] # order 
is irrelevant assert set(data["ssh"]["rootKeys"]) == set([key1, key2]) + + +############### ADMIN KEYS + + +def test_read_admin_keys_from_json(generic_userdata): + admin_name = "tester" + assert get_user_by_username(admin_name).ssh_keys == ["ssh-rsa KEY test@pc"] + new_keys = ["ssh-rsa KEY test@pc", "ssh-ed25519 KEY2 test@pc"] + + with WriteUserData() as data: + data["sshKeys"] = new_keys + + get_user_by_username(admin_name).ssh_keys == new_keys + + with WriteUserData() as data: + del data["sshKeys"] + + get_user_by_username(admin_name).ssh_keys == [] + + +def test_adding_admin_key_writes_json(generic_userdata): + admin_name = "tester" + + with WriteUserData() as data: + del data["sshKeys"] + key1 = "ssh-ed25519 KEY test@pc" + key2 = "ssh-ed25519 KEY2 test@pc" + create_ssh_key(admin_name, key1) + + with ReadUserData() as data: + assert "sshKeys" in data + assert data["sshKeys"] == [key1] + + create_ssh_key(admin_name, key2) + + with ReadUserData() as data: + assert "sshKeys" in data + # order is irrelevant + assert set(data["sshKeys"]) == set([key1, key2]) + + +def test_removing_admin_key_writes_json(generic_userdata): + # generic userdata has a a single root key + admin_name = "tester" + + admin_keys = get_user_by_username(admin_name).ssh_keys + assert len(admin_keys) == 1 + key1 = admin_keys[0] + key2 = "ssh-rsa MYSUPERKEY admin@pc" + + create_ssh_key(admin_name, key2) + admin_keys = get_user_by_username(admin_name).ssh_keys + assert len(admin_keys) == 2 + + remove_ssh_key(admin_name, key2) + + with ReadUserData() as data: + assert "sshKeys" in data + assert data["sshKeys"] == [key1] + + remove_ssh_key(admin_name, key1) + with ReadUserData() as data: + assert "sshKeys" in data + assert data["sshKeys"] == [] + + +def test_remove_admin_key_on_undefined(generic_userdata): + # generic userdata has a a single root key + admin_name = "tester" + + admin_keys = get_user_by_username(admin_name).ssh_keys + assert len(admin_keys) == 1 + key1 = admin_keys[0] + + with 
WriteUserData() as data: + del data["sshKeys"] + + with pytest.raises(KeyNotFound): + remove_ssh_key(admin_name, key1) + admin_keys = get_user_by_username(admin_name).ssh_keys + assert len(admin_keys) == 0 From 7c4c5929df5b78599f758346e418642372a849b1 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 06:57:13 +0000 Subject: [PATCH 203/246] test(ssh): parametrized testing of ssh key addition --- tests/data/turned_on.json | 21 ++++- tests/test_graphql/test_ssh.py | 92 +++++++------------ .../test_rest_endpoints/services/test_ssh.py | 10 -- 3 files changed, 52 insertions(+), 71 deletions(-) diff --git a/tests/data/turned_on.json b/tests/data/turned_on.json index 5b41501..06e957a 100644 --- a/tests/data/turned_on.json +++ b/tests/data/turned_on.json @@ -32,5 +32,24 @@ "accountId": "ID", "accountKey": "KEY", "bucket": "selfprivacy" - } + }, + "users": [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": [ + "ssh-rsa KEY user1@pc" + ] + }, + { + "username": "user2", + "hashedPassword": "HASHED_PASSWORD_2", + "sshKeys": [ + ] + }, + { + "username": "user3", + "hashedPassword": "HASHED_PASSWORD_3" + } + ] } diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index ee95cbe..2b2e521 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -19,6 +19,8 @@ from tests.test_graphql.common import ( ) from tests.test_graphql.test_users import API_USERS_INFO +key_users = ["root", "tester", "user1"] + class ProcessMock: """Mock subprocess.Popen""" @@ -61,6 +63,19 @@ def no_rootkeys(generic_userdata): assert get_ssh_settings().rootKeys == [] +@pytest.fixture +def no_keys(generic_userdata): + # this removes root and admin keys too + + users = get_users() + for user in users: + for key in user.ssh_keys: + remove_ssh_key(user.username, key) + users = get_users() + for user in users: + assert user.ssh_keys == [] + + @pytest.fixture def no_admin_key(generic_userdata, authorized_client): 
admin_keys = api_get_user_keys(authorized_client, admin_name()) @@ -370,31 +385,7 @@ def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_po assert_empty(response) -def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "user1", - "sshKey": "ssh-rsa KEY test_key@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["addSshKey"]["code"] == 201 - assert response.json()["data"]["users"]["addSshKey"]["message"] is not None - assert response.json()["data"]["users"]["addSshKey"]["success"] is True - - assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "user1" - assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ - "ssh-rsa KEY user1@pc", - "ssh-rsa KEY test_key@pc", - ] +# Unauth getting of keys is tested in test_users.py because it is a part of users interface def test_graphql_get_root_key(authorized_client, some_users): @@ -412,16 +403,27 @@ def test_get_admin_key_when_none(authorized_client, no_admin_key): assert api_get_user_keys(authorized_client, admin_name()) == [] -def test_graphql_add_root_ssh_key(authorized_client, no_rootkeys): - output = api_add_ssh_key(authorized_client, "root", "ssh-rsa KEY test_key@pc") +@pytest.mark.parametrize("user", key_users) +def test_graphql_add_ssh_key_when_none(authorized_client, no_keys, user): + key1 = "ssh-rsa KEY test_key@pc" + if user == "root": + assert api_rootkeys(authorized_client) == [] + else: + assert api_get_user_keys(authorized_client, user) == [] + + output = api_add_ssh_key(authorized_client, user, key1) assert output["code"] == 201 assert output["message"] is not None assert output["success"] is True - assert output["user"]["username"] == "root" - assert 
output["user"]["sshKeys"] == ["ssh-rsa KEY test_key@pc"] - assert api_rootkeys(authorized_client) == ["ssh-rsa KEY test_key@pc"] + assert output["user"]["username"] == user + assert output["user"]["sshKeys"] == [key1] + + if user == "root": + assert api_rootkeys(authorized_client) == [key1] + else: + assert api_get_user_keys(authorized_client, user) == [key1] def test_graphql_add_root_ssh_key_one_more(authorized_client, no_rootkeys): @@ -453,33 +455,6 @@ def test_graphql_add_root_ssh_key_same(authorized_client, no_rootkeys): assert_errorcode(output, 409) -def test_graphql_add_admin_key(authorized_client, some_users): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "tester", - "sshKey": "ssh-rsa KEY test_key@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["addSshKey"]["code"] == 201 - assert response.json()["data"]["users"]["addSshKey"]["message"] is not None - assert response.json()["data"]["users"]["addSshKey"]["success"] is True - - assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "tester" - assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ - "ssh-rsa KEY test@pc", - "ssh-rsa KEY test_key@pc", - ] - - # TODO: multiplex for root and admin def test_graphql_add_bad_ssh_key(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.post( @@ -638,9 +613,6 @@ def test_graphql_remove_main_ssh_key( assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] -key_users = ["root", "tester", "user1"] - - @pytest.mark.parametrize("user", key_users) def test_graphql_remove_nonexistent_ssh_key(authorized_client, some_users, user): output = api_remove_ssh_key(authorized_client, user, "ssh-rsa nonexistent") diff --git a/tests/test_rest_endpoints/services/test_ssh.py 
b/tests/test_rest_endpoints/services/test_ssh.py index 10f7752..ebc60fb 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,16 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -def test_add_admin_key(authorized_client, ssh_on): - response = authorized_client.post( - "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 201 - assert read_json(ssh_on / "turned_on.json")["sshKeys"] == [ - "ssh-rsa KEY test@pc", - ] - - def test_add_admin_key_one_more(authorized_client, root_and_admin_have_keys): response = authorized_client.post( "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY_2 test@pc"} From 65c2023366c350daf4cf6746909482c008a057a8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 07:37:04 +0000 Subject: [PATCH 204/246] test(ssh): parametrized testing of ssh key addition, more --- tests/test_graphql/common.py | 4 ++-- tests/test_graphql/test_ssh.py | 36 +++++++++++++++++----------------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/tests/test_graphql/common.py b/tests/test_graphql/common.py index 286df67..5e6dc04 100644 --- a/tests/test_graphql/common.py +++ b/tests/test_graphql/common.py @@ -4,14 +4,14 @@ from tests.conftest import TOKENS_FILE_CONTENTS, DEVICE_WE_AUTH_TESTS_WITH ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] -def assert_ok(output: dict) -> None: +def assert_ok(output: dict, code=200) -> None: if output["success"] is False: # convenience for debugging, this should display error # if message is empty, consider adding helpful messages raise ValueError(output["code"], output["message"]) assert output["success"] is True assert output["message"] is not None - assert output["code"] == 200 + assert output["code"] == code def assert_errorcode(output: dict, code) -> None: diff --git a/tests/test_graphql/test_ssh.py 
b/tests/test_graphql/test_ssh.py index 2b2e521..b27c5fe 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -413,9 +413,7 @@ def test_graphql_add_ssh_key_when_none(authorized_client, no_keys, user): output = api_add_ssh_key(authorized_client, user, key1) - assert output["code"] == 201 - assert output["message"] is not None - assert output["success"] is True + assert_ok(output, code=201) assert output["user"]["username"] == user assert output["user"]["sshKeys"] == [key1] @@ -426,24 +424,26 @@ def test_graphql_add_ssh_key_when_none(authorized_client, no_keys, user): assert api_get_user_keys(authorized_client, user) == [key1] -def test_graphql_add_root_ssh_key_one_more(authorized_client, no_rootkeys): - output = api_add_ssh_key(authorized_client, "root", "ssh-rsa KEY test_key@pc") - assert output["user"]["sshKeys"] == ["ssh-rsa KEY test_key@pc"] - - output = api_add_ssh_key(authorized_client, "root", "ssh-rsa KEY2 test_key@pc") - assert output["code"] == 201 - assert output["message"] is not None - assert output["success"] is True - - assert output["user"]["username"] == "root" - - expected_keys = [ +@pytest.mark.parametrize("user", key_users) +def test_graphql_add_ssh_key_one_more(authorized_client, no_keys, user): + keys = [ "ssh-rsa KEY test_key@pc", "ssh-rsa KEY2 test_key@pc", ] + output = api_add_ssh_key(authorized_client, user, keys[0]) + assert output["user"]["sshKeys"] == [keys[0]] - assert output["user"]["sshKeys"] == expected_keys - assert api_rootkeys(authorized_client) == expected_keys + output = api_add_ssh_key(authorized_client, user, keys[1]) + + assert_ok(output, code=201) + + assert output["user"]["username"] == user + assert output["user"]["sshKeys"] == keys + + if user == "root": + assert api_rootkeys(authorized_client) == keys + else: + assert api_get_user_keys(authorized_client, user) == keys def test_graphql_add_root_ssh_key_same(authorized_client, no_rootkeys): @@ -585,7 +585,7 @@ def 
test_graphql_remove_root_ssh_key(authorized_client, some_users): assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] -def test_graphql_remove_main_ssh_key( +def test_graphql_remove_admin_ssh_key( authorized_client, some_users, mock_subprocess_popen ): response = authorized_client.post( From 16c2598e9b7a4d8a8b470faaa5e3145c33cec3da Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 08:12:50 +0000 Subject: [PATCH 205/246] test(ssh): parametrized testing of ssh key addition, existing and invalid --- tests/test_graphql/test_ssh.py | 31 +++++-------------- .../test_rest_endpoints/services/test_ssh.py | 29 ----------------- 2 files changed, 8 insertions(+), 52 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index b27c5fe..e6a619c 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -446,35 +446,20 @@ def test_graphql_add_ssh_key_one_more(authorized_client, no_keys, user): assert api_get_user_keys(authorized_client, user) == keys -def test_graphql_add_root_ssh_key_same(authorized_client, no_rootkeys): +@pytest.mark.parametrize("user", key_users) +def test_graphql_add_ssh_key_same(authorized_client, no_keys, user): key = "ssh-rsa KEY test_key@pc" - output = api_add_ssh_key(authorized_client, "root", key) + output = api_add_ssh_key(authorized_client, user, key) assert output["user"]["sshKeys"] == [key] - output = api_add_ssh_key(authorized_client, "root", key) + output = api_add_ssh_key(authorized_client, user, key) assert_errorcode(output, 409) -# TODO: multiplex for root and admin -def test_graphql_add_bad_ssh_key(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "user1", - "sshKey": "trust me, this is the ssh key", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") 
is not None - - assert response.json()["data"]["users"]["addSshKey"]["code"] == 400 - assert response.json()["data"]["users"]["addSshKey"]["message"] is not None - assert response.json()["data"]["users"]["addSshKey"]["success"] is False +@pytest.mark.parametrize("user", key_users) +def test_graphql_add_bad_ssh_key(authorized_client, some_users, user): + output = api_add_ssh_key(authorized_client, user, "trust me, this is the ssh key") + assert_errorcode(output, 400) def test_graphql_add_ssh_key_nonexistent_user( diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index ebc60fb..ce9a39c 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,35 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -def test_add_admin_key_one_more(authorized_client, root_and_admin_have_keys): - response = authorized_client.post( - "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY_2 test@pc"} - ) - assert response.status_code == 201 - assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[ - "sshKeys" - ] == ["ssh-rsa KEY test@pc", "ssh-rsa KEY_2 test@pc"] - - -def test_add_existing_admin_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.post( - "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 409 - assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[ - "sshKeys" - ] == [ - "ssh-rsa KEY test@pc", - ] - - -def test_add_invalid_admin_key(authorized_client, ssh_on): - response = authorized_client.post( - "/services/ssh/keys/tester", json={"public_key": "INVALID KEY test@pc"} - ) - assert response.status_code == 400 - - @pytest.mark.parametrize("user", [1, 2, 3]) def test_get_user_key(authorized_client, some_users, user): response = 
authorized_client.get(f"/services/ssh/keys/user{user}") From 42d96bcd6d0689519ac2c1fc36cfd160aae2b8d1 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 08:25:20 +0000 Subject: [PATCH 206/246] test(ssh): remove rest user getting tests (they are covered by users tests --- tests/test_rest_endpoints/services/test_ssh.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index ce9a39c..604b7cd 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,21 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -@pytest.mark.parametrize("user", [1, 2, 3]) -def test_get_user_key(authorized_client, some_users, user): - response = authorized_client.get(f"/services/ssh/keys/user{user}") - assert response.status_code == 200 - if user == 1: - assert response.json() == ["ssh-rsa KEY user1@pc"] - else: - assert response.json() == [] - - -def test_get_keys_of_nonexistent_user(authorized_client, some_users): - response = authorized_client.get("/services/ssh/keys/user4") - assert response.status_code == 404 - - def test_get_keys_of_undefined_users(authorized_client, undefined_settings): response = authorized_client.get("/services/ssh/keys/user1") assert response.status_code == 404 From 946413615b344a8b4aba7414f2b1c1549e19f6c8 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 08:34:45 +0000 Subject: [PATCH 207/246] test(ssh): dealing with undefined users --- tests/test_graphql/test_users.py | 36 ++++++++++++++++++- .../test_rest_endpoints/services/test_ssh.py | 5 --- 2 files changed, 35 insertions(+), 6 deletions(-) diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index af40981..e397600 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -6,7 +6,7 @@ from 
tests.common import ( generate_users_query, read_json, ) -from tests.test_graphql.common import assert_empty +from tests.test_graphql.common import assert_empty, assert_errorcode invalid_usernames = [ "messagebus", @@ -170,6 +170,23 @@ def test_graphql_get_no_users(authorized_client, no_users, mock_subprocess_popen ] +def test_graphql_get_users_undefined(authorized_client, undefined_settings): + response = authorized_client.post( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert len(response.json()["data"]["users"]["allUsers"]) == 1 + assert response.json()["data"]["users"]["allUsers"][0]["username"] == "tester" + assert response.json()["data"]["users"]["allUsers"][0]["sshKeys"] == [ + "ssh-rsa KEY test@pc" + ] + + API_GET_USERS = """ query TestUsers($username: String!) { users { @@ -216,6 +233,23 @@ def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen ] +def test_graphql_get_some_user_undefined(authorized_client, undefined_settings): + + response = authorized_client.post( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "user1", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["users"]["getUser"] is None + + def test_graphql_get_some_user(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.post( "/graphql", diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index 604b7cd..beadbd2 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,11 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -def test_get_keys_of_undefined_users(authorized_client, undefined_settings): - response = 
authorized_client.get("/services/ssh/keys/user1") - assert response.status_code == 404 - - @pytest.mark.parametrize("user", [1, 2, 3]) def test_add_user_key(authorized_client, some_users, user): response = authorized_client.post( From ca4b3c972d2cac2a2f919ec943c4ede65134692d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 08:49:54 +0000 Subject: [PATCH 208/246] test(ssh): regular users --- tests/test_graphql/test_ssh.py | 2 +- .../test_rest_endpoints/services/test_ssh.py | 36 ------------------- 2 files changed, 1 insertion(+), 37 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index e6a619c..f810598 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -19,7 +19,7 @@ from tests.test_graphql.common import ( ) from tests.test_graphql.test_users import API_USERS_INFO -key_users = ["root", "tester", "user1"] +key_users = ["root", "tester", "user1", "user2", "user3"] class ProcessMock: diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index beadbd2..3851230 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,42 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -@pytest.mark.parametrize("user", [1, 2, 3]) -def test_add_user_key(authorized_client, some_users, user): - response = authorized_client.post( - f"/services/ssh/keys/user{user}", json={"public_key": "ssh-ed25519 KEY test@pc"} - ) - assert response.status_code == 201 - if user == 1: - assert read_json(some_users / "some_users.json")["users"][user - 1][ - "sshKeys" - ] == [ - "ssh-rsa KEY user1@pc", - "ssh-ed25519 KEY test@pc", - ] - else: - assert read_json(some_users / "some_users.json")["users"][user - 1][ - "sshKeys" - ] == ["ssh-ed25519 KEY test@pc"] - - -def test_add_existing_user_key(authorized_client, some_users): - response = 
authorized_client.post( - "/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user1@pc"} - ) - assert response.status_code == 409 - assert read_json(some_users / "some_users.json")["users"][0]["sshKeys"] == [ - "ssh-rsa KEY user1@pc", - ] - - -def test_add_invalid_user_key(authorized_client, some_users): - response = authorized_client.post( - "/services/ssh/keys/user1", json={"public_key": "INVALID KEY user1@pc"} - ) - assert response.status_code == 400 - - def test_delete_user_key(authorized_client, some_users): response = authorized_client.delete( "/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user1@pc"} From b120858fa17aff01365769619abc8c38c37339aa Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 09:04:07 +0000 Subject: [PATCH 209/246] test(ssh): parametrized removing keys --- tests/test_graphql/test_ssh.py | 93 ++++--------------- .../test_rest_endpoints/services/test_ssh.py | 8 -- 2 files changed, 17 insertions(+), 84 deletions(-) diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index f810598..2a2c259 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -518,84 +518,25 @@ def test_graphql_remove_ssh_key_unauthorized(client, some_users, mock_subprocess assert_empty(response) -def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_REMOVE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "user1", - "sshKey": "ssh-rsa KEY user1@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None +@pytest.mark.parametrize("user", key_users) +def test_graphql_remove_ssh_key(authorized_client, no_keys, user): + keys = [ + "ssh-rsa KEY test_key@pc", + "ssh-rsa KEY2 test_key@pc", + ] + output = api_add_ssh_key(authorized_client, user, keys[0]) + output = api_add_ssh_key(authorized_client, user, keys[1]) + assert 
output["user"]["sshKeys"] == keys - assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200 - assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["users"]["removeSshKey"]["success"] is True + output = api_remove_ssh_key(authorized_client, user, keys[1]) + assert_ok(output) + assert output["user"]["username"] == user + assert output["user"]["sshKeys"] == [keys[0]] - assert ( - response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "user1" - ) - assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] - - -def test_graphql_remove_root_ssh_key(authorized_client, some_users): - response = authorized_client.post( - "/graphql", - json={ - "query": API_REMOVE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "root", - "sshKey": "ssh-ed25519 KEY test@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200 - assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["users"]["removeSshKey"]["success"] is True - - assert ( - response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "root" - ) - assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] - - -def test_graphql_remove_admin_ssh_key( - authorized_client, some_users, mock_subprocess_popen -): - response = authorized_client.post( - "/graphql", - json={ - "query": API_REMOVE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "tester", - "sshKey": "ssh-rsa KEY test@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200 - assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None - assert 
response.json()["data"]["users"]["removeSshKey"]["success"] is True - - assert ( - response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "tester" - ) - assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] + if user == "root": + assert api_rootkeys(authorized_client) == [keys[0]] + else: + assert api_get_user_keys(authorized_client, user) == [keys[0]] @pytest.mark.parametrize("user", key_users) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index 3851230..452e5d5 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,14 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -def test_delete_user_key(authorized_client, some_users): - response = authorized_client.delete( - "/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user1@pc"} - ) - assert response.status_code == 200 - assert read_json(some_users / "some_users.json")["users"][0]["sshKeys"] == [] - - @pytest.mark.parametrize("user", [2, 3]) def test_delete_nonexistent_user_key(authorized_client, some_users, user): response = authorized_client.delete( From c5bb18215bd3d2bab7a7196607907b262afe9c8d Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 09:07:20 +0000 Subject: [PATCH 210/246] test(ssh): delete redundant ssh tests --- .../test_rest_endpoints/services/test_ssh.py | 29 ------------------- 1 file changed, 29 deletions(-) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py index 452e5d5..cb91f96 100644 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ b/tests/test_rest_endpoints/services/test_ssh.py @@ -95,28 +95,6 @@ def some_users(mocker, datadir): ## /ssh/keys/{user} ###################################################### -@pytest.mark.parametrize("user", [2, 3]) -def 
test_delete_nonexistent_user_key(authorized_client, some_users, user): - response = authorized_client.delete( - f"/services/ssh/keys/user{user}", json={"public_key": "ssh-rsa KEY user1@pc"} - ) - assert response.status_code == 404 - if user == 2: - assert ( - read_json(some_users / "some_users.json")["users"][user - 1]["sshKeys"] - == [] - ) - if user == 3: - "sshKeys" not in read_json(some_users / "some_users.json")["users"][user - 1] - - -def test_add_keys_of_nonexistent_user(authorized_client, some_users): - response = authorized_client.post( - "/services/ssh/keys/user4", json={"public_key": "ssh-rsa KEY user4@pc"} - ) - assert response.status_code == 404 - - def test_add_key_on_undefined_users(authorized_client, undefined_settings): response = authorized_client.post( "/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user4@pc"} @@ -124,13 +102,6 @@ def test_add_key_on_undefined_users(authorized_client, undefined_settings): assert response.status_code == 404 -def test_delete_keys_of_nonexistent_user(authorized_client, some_users): - response = authorized_client.delete( - "/services/ssh/keys/user4", json={"public_key": "ssh-rsa KEY user4@pc"} - ) - assert response.status_code == 404 - - def test_delete_key_when_undefined_users(authorized_client, undefined_settings): response = authorized_client.delete( "/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user1@pc"} From bc45a48af37eb4b534928a92fc80ca842e3762de Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 09:40:03 +0000 Subject: [PATCH 211/246] test(ssh): json storage of user keys : reading --- tests/data/turned_on.json | 10 ++-- tests/test_ssh.py | 103 +++++++++++++++++++++++++++++++++++++- 2 files changed, 105 insertions(+), 8 deletions(-) diff --git a/tests/data/turned_on.json b/tests/data/turned_on.json index 06e957a..2c98e77 100644 --- a/tests/data/turned_on.json +++ b/tests/data/turned_on.json @@ -37,19 +37,17 @@ { "username": "user1", "hashedPassword": 
"HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] + "sshKeys": ["ssh-rsa KEY user1@pc"] }, { "username": "user2", "hashedPassword": "HASHED_PASSWORD_2", - "sshKeys": [ - ] + "sshKeys": ["ssh-rsa KEY user2@pc"] }, { "username": "user3", - "hashedPassword": "HASHED_PASSWORD_3" + "hashedPassword": "HASHED_PASSWORD_3", + "sshKeys": ["ssh-rsa KEY user3@pc"] } ] } diff --git a/tests/test_ssh.py b/tests/test_ssh.py index d333eef..e65ebc0 100644 --- a/tests/test_ssh.py +++ b/tests/test_ssh.py @@ -245,12 +245,12 @@ def test_read_admin_keys_from_json(generic_userdata): with WriteUserData() as data: data["sshKeys"] = new_keys - get_user_by_username(admin_name).ssh_keys == new_keys + assert get_user_by_username(admin_name).ssh_keys == new_keys with WriteUserData() as data: del data["sshKeys"] - get_user_by_username(admin_name).ssh_keys == [] + assert get_user_by_username(admin_name).ssh_keys == [] def test_adding_admin_key_writes_json(generic_userdata): @@ -314,3 +314,102 @@ def test_remove_admin_key_on_undefined(generic_userdata): remove_ssh_key(admin_name, key1) admin_keys = get_user_by_username(admin_name).ssh_keys assert len(admin_keys) == 0 + + +############### USER KEYS + +regular_users = ["user1", "user2", "user3"] + + +def find_user_index_in_json_users(users: list, username: str) -> Optional[int]: + for i, user in enumerate(users): + if user["username"] == username: + return i + return None + + +@pytest.mark.parametrize("username", regular_users) +def test_read_user_keys_from_json(generic_userdata, username): + old_keys = [f"ssh-rsa KEY {username}@pc"] + assert get_user_by_username(username).ssh_keys == old_keys + new_keys = ["ssh-rsa KEY test@pc", "ssh-ed25519 KEY2 test@pc"] + + with WriteUserData() as data: + user_index = find_user_index_in_json_users(data["users"], username) + data["users"][user_index]["sshKeys"] = new_keys + + assert get_user_by_username(username).ssh_keys == new_keys + + with WriteUserData() as data: + user_index = 
find_user_index_in_json_users(data["users"], username) + del data["users"][user_index]["sshKeys"] + + assert get_user_by_username(username).ssh_keys == [] + + # deeper deletions are for user getter tests, not here + + +# @pytest.mark.parametrize("username", regular_users) +# def test_adding_user_key_writes_json(generic_userdata, regular_users): +# admin_name = "tester" + +# with WriteUserData() as data: +# del data["sshKeys"] +# key1 = "ssh-ed25519 KEY test@pc" +# key2 = "ssh-ed25519 KEY2 test@pc" +# create_ssh_key(admin_name, key1) + +# with ReadUserData() as data: +# assert "sshKeys" in data +# assert data["sshKeys"] == [key1] + +# create_ssh_key(admin_name, key2) + +# with ReadUserData() as data: +# assert "sshKeys" in data +# # order is irrelevant +# assert set(data["sshKeys"]) == set([key1, key2]) + + +# @pytest.mark.parametrize("username", regular_users) +# def test_removing_user_key_writes_json(generic_userdata, regular_users): +# # generic userdata has a a single root key +# admin_name = "tester" + +# admin_keys = get_user_by_username(admin_name).ssh_keys +# assert len(admin_keys) == 1 +# key1 = admin_keys[0] +# key2 = "ssh-rsa MYSUPERKEY admin@pc" + +# create_ssh_key(admin_name, key2) +# admin_keys = get_user_by_username(admin_name).ssh_keys +# assert len(admin_keys) == 2 + +# remove_ssh_key(admin_name, key2) + +# with ReadUserData() as data: +# assert "sshKeys" in data +# assert data["sshKeys"] == [key1] + +# remove_ssh_key(admin_name, key1) +# with ReadUserData() as data: +# assert "sshKeys" in data +# assert data["sshKeys"] == [] + + +# @pytest.mark.parametrize("username", regular_users) +# def test_remove_user_key_on_undefined(generic_userdata, regular_users): +# # generic userdata has a a single root key +# admin_name = "tester" + +# admin_keys = get_user_by_username(admin_name).ssh_keys +# assert len(admin_keys) == 1 +# key1 = admin_keys[0] + +# with WriteUserData() as data: +# del data["sshKeys"] + +# with pytest.raises(KeyNotFound): +# 
remove_ssh_key(admin_name, key1) +# admin_keys = get_user_by_username(admin_name).ssh_keys +# assert len(admin_keys) == 0 From 0669dc117b1161184081fd38513773569ffdeeae Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 09:49:27 +0000 Subject: [PATCH 212/246] test(ssh): user key storage test: adding --- tests/test_ssh.py | 38 ++++++++++++++++++++------------------ 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/tests/test_ssh.py b/tests/test_ssh.py index e65ebc0..5832a2a 100644 --- a/tests/test_ssh.py +++ b/tests/test_ssh.py @@ -349,30 +349,32 @@ def test_read_user_keys_from_json(generic_userdata, username): # deeper deletions are for user getter tests, not here -# @pytest.mark.parametrize("username", regular_users) -# def test_adding_user_key_writes_json(generic_userdata, regular_users): -# admin_name = "tester" +@pytest.mark.parametrize("username", regular_users) +def test_adding_user_key_writes_json(generic_userdata, username): -# with WriteUserData() as data: -# del data["sshKeys"] -# key1 = "ssh-ed25519 KEY test@pc" -# key2 = "ssh-ed25519 KEY2 test@pc" -# create_ssh_key(admin_name, key1) + with WriteUserData() as data: + user_index = find_user_index_in_json_users(data["users"], username) + del data["users"][user_index]["sshKeys"] + key1 = "ssh-ed25519 KEY test@pc" + key2 = "ssh-ed25519 KEY2 test@pc" + create_ssh_key(username, key1) -# with ReadUserData() as data: -# assert "sshKeys" in data -# assert data["sshKeys"] == [key1] + with ReadUserData() as data: + user_index = find_user_index_in_json_users(data["users"], username) + assert "sshKeys" in data["users"][user_index] + assert data["users"][user_index]["sshKeys"] == [key1] -# create_ssh_key(admin_name, key2) + create_ssh_key(username, key2) -# with ReadUserData() as data: -# assert "sshKeys" in data -# # order is irrelevant -# assert set(data["sshKeys"]) == set([key1, key2]) + with ReadUserData() as data: + user_index = find_user_index_in_json_users(data["users"], username) + 
assert "sshKeys" in data["users"][user_index] + # order is irrelevant + assert set(data["users"][user_index]["sshKeys"]) == set([key1, key2]) # @pytest.mark.parametrize("username", regular_users) -# def test_removing_user_key_writes_json(generic_userdata, regular_users): +# def test_removing_user_key_writes_json(generic_userdata, username): # # generic userdata has a a single root key # admin_name = "tester" @@ -398,7 +400,7 @@ def test_read_user_keys_from_json(generic_userdata, username): # @pytest.mark.parametrize("username", regular_users) -# def test_remove_user_key_on_undefined(generic_userdata, regular_users): +# def test_remove_user_key_on_undefined(generic_userdata, username): # # generic userdata has a a single root key # admin_name = "tester" From 7377c6375a643c77fb25c8c71da67cc4870fc5da Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 09:57:35 +0000 Subject: [PATCH 213/246] test(ssh): user key storage test: removing --- tests/test_ssh.py | 39 ++++++++++++++++++++------------------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/tests/test_ssh.py b/tests/test_ssh.py index 5832a2a..e2726d4 100644 --- a/tests/test_ssh.py +++ b/tests/test_ssh.py @@ -373,30 +373,31 @@ def test_adding_user_key_writes_json(generic_userdata, username): assert set(data["users"][user_index]["sshKeys"]) == set([key1, key2]) -# @pytest.mark.parametrize("username", regular_users) -# def test_removing_user_key_writes_json(generic_userdata, username): -# # generic userdata has a a single root key -# admin_name = "tester" +@pytest.mark.parametrize("username", regular_users) +def test_removing_user_key_writes_json(generic_userdata, username): + # generic userdata has a a single root key -# admin_keys = get_user_by_username(admin_name).ssh_keys -# assert len(admin_keys) == 1 -# key1 = admin_keys[0] -# key2 = "ssh-rsa MYSUPERKEY admin@pc" + user_keys = get_user_by_username(username).ssh_keys + assert len(user_keys) == 1 + key1 = user_keys[0] + key2 = 
"ssh-rsa MYSUPERKEY admin@pc" -# create_ssh_key(admin_name, key2) -# admin_keys = get_user_by_username(admin_name).ssh_keys -# assert len(admin_keys) == 2 + create_ssh_key(username, key2) + user_keys = get_user_by_username(username).ssh_keys + assert len(user_keys) == 2 -# remove_ssh_key(admin_name, key2) + remove_ssh_key(username, key2) -# with ReadUserData() as data: -# assert "sshKeys" in data -# assert data["sshKeys"] == [key1] + with ReadUserData() as data: + user_index = find_user_index_in_json_users(data["users"], username) + assert "sshKeys" in data["users"][user_index] + assert data["users"][user_index]["sshKeys"] == [key1] -# remove_ssh_key(admin_name, key1) -# with ReadUserData() as data: -# assert "sshKeys" in data -# assert data["sshKeys"] == [] + remove_ssh_key(username, key1) + with ReadUserData() as data: + user_index = find_user_index_in_json_users(data["users"], username) + assert "sshKeys" in data["users"][user_index] + assert data["users"][user_index]["sshKeys"] == [] # @pytest.mark.parametrize("username", regular_users) From 03feab76b0192c3d203e974e6de5d2b126499882 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 10:09:51 +0000 Subject: [PATCH 214/246] test(ssh): user key storage test: removing user keys on undefined --- tests/test_ssh.py | 46 ++++++++++++++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 16 deletions(-) diff --git a/tests/test_ssh.py b/tests/test_ssh.py index e2726d4..a688a63 100644 --- a/tests/test_ssh.py +++ b/tests/test_ssh.py @@ -12,6 +12,7 @@ from selfprivacy_api.actions.ssh import ( create_ssh_key, remove_ssh_key, KeyNotFound, + UserNotFound, ) from selfprivacy_api.actions.users import ( get_users, @@ -275,7 +276,7 @@ def test_adding_admin_key_writes_json(generic_userdata): def test_removing_admin_key_writes_json(generic_userdata): - # generic userdata has a a single root key + # generic userdata has a a single admin key admin_name = "tester" admin_keys = 
get_user_by_username(admin_name).ssh_keys @@ -300,7 +301,7 @@ def test_removing_admin_key_writes_json(generic_userdata): def test_remove_admin_key_on_undefined(generic_userdata): - # generic userdata has a a single root key + # generic userdata has a a single admin key admin_name = "tester" admin_keys = get_user_by_username(admin_name).ssh_keys @@ -375,7 +376,7 @@ def test_adding_user_key_writes_json(generic_userdata, username): @pytest.mark.parametrize("username", regular_users) def test_removing_user_key_writes_json(generic_userdata, username): - # generic userdata has a a single root key + # generic userdata has a a single user key user_keys = get_user_by_username(username).ssh_keys assert len(user_keys) == 1 @@ -400,19 +401,32 @@ def test_removing_user_key_writes_json(generic_userdata, username): assert data["users"][user_index]["sshKeys"] == [] -# @pytest.mark.parametrize("username", regular_users) -# def test_remove_user_key_on_undefined(generic_userdata, username): -# # generic userdata has a a single root key -# admin_name = "tester" +@pytest.mark.parametrize("username", regular_users) +def test_remove_user_key_on_undefined(generic_userdata, username): + # generic userdata has a a single user key + user_keys = get_user_by_username(username).ssh_keys + assert len(user_keys) == 1 + key1 = user_keys[0] -# admin_keys = get_user_by_username(admin_name).ssh_keys -# assert len(admin_keys) == 1 -# key1 = admin_keys[0] + with WriteUserData() as data: + user_index = find_user_index_in_json_users(data["users"], username) + del data["users"][user_index]["sshKeys"] -# with WriteUserData() as data: -# del data["sshKeys"] + with pytest.raises(KeyNotFound): + remove_ssh_key(username, key1) -# with pytest.raises(KeyNotFound): -# remove_ssh_key(admin_name, key1) -# admin_keys = get_user_by_username(admin_name).ssh_keys -# assert len(admin_keys) == 0 + user_keys = get_user_by_username(username).ssh_keys + assert len(user_keys) == 0 + + with WriteUserData() as data: + 
user_index = find_user_index_in_json_users(data["users"], username) + del data["users"][user_index] + + with pytest.raises(UserNotFound): + remove_ssh_key(username, key1) + + with WriteUserData() as data: + del data["users"] + + with pytest.raises(UserNotFound): + remove_ssh_key(username, key1) From 04e3ee821f16fd12beea08535ef67032c099cbe0 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 10:11:37 +0000 Subject: [PATCH 215/246] test(ssh): remove the rest of rest ssh tests bc redundant --- .../test_rest_endpoints/services/data/gitkeep | 0 .../test_rest_endpoints/services/test_ssh.py | 109 ------------------ .../services/test_ssh/all_off.json | 57 --------- .../test_ssh/root_and_admin_have_keys.json | 57 --------- .../services/test_ssh/some_users.json | 76 ------------ .../services/test_ssh/turned_off.json | 51 -------- .../services/test_ssh/turned_on.json | 51 -------- .../services/test_ssh/undefined.json | 47 -------- .../services/test_ssh/undefined_values.json | 51 -------- 9 files changed, 499 deletions(-) delete mode 100644 tests/test_rest_endpoints/services/data/gitkeep delete mode 100644 tests/test_rest_endpoints/services/test_ssh.py delete mode 100644 tests/test_rest_endpoints/services/test_ssh/all_off.json delete mode 100644 tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json delete mode 100644 tests/test_rest_endpoints/services/test_ssh/some_users.json delete mode 100644 tests/test_rest_endpoints/services/test_ssh/turned_off.json delete mode 100644 tests/test_rest_endpoints/services/test_ssh/turned_on.json delete mode 100644 tests/test_rest_endpoints/services/test_ssh/undefined.json delete mode 100644 tests/test_rest_endpoints/services/test_ssh/undefined_values.json diff --git a/tests/test_rest_endpoints/services/data/gitkeep b/tests/test_rest_endpoints/services/data/gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/tests/test_rest_endpoints/services/test_ssh.py 
b/tests/test_rest_endpoints/services/test_ssh.py deleted file mode 100644 index cb91f96..0000000 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ /dev/null @@ -1,109 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r", encoding="utf-8") as file: - return json.load(file) - - -## FIXTURES ################################################### - - -@pytest.fixture -def ssh_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert not read_json(datadir / "turned_off.json")["ssh"]["enable"] - assert read_json(datadir / "turned_off.json")["ssh"]["passwordAuthentication"] - return datadir - - -@pytest.fixture -def ssh_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_off.json")["ssh"]["passwordAuthentication"] - assert read_json(datadir / "turned_on.json")["ssh"]["enable"] - return datadir - - -@pytest.fixture -def all_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "all_off.json") - assert not read_json(datadir / "all_off.json")["ssh"]["passwordAuthentication"] - assert not read_json(datadir / "all_off.json")["ssh"]["enable"] - return datadir - - -@pytest.fixture -def undefined_settings(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "ssh" not in read_json(datadir / "undefined.json") - return datadir - - -@pytest.fixture -def undefined_values(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined_values.json" - ) - assert "ssh" in read_json(datadir / "undefined_values.json") - assert "enable" not in read_json(datadir / "undefined_values.json")["ssh"] - assert ( - "passwordAuthentication" - not in read_json(datadir / "undefined_values.json")["ssh"] - ) 
- return datadir - - -@pytest.fixture -def root_and_admin_have_keys(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", - new=datadir / "root_and_admin_have_keys.json", - ) - assert read_json(datadir / "root_and_admin_have_keys.json")["ssh"]["enable"] - assert read_json(datadir / "root_and_admin_have_keys.json")["ssh"][ - "passwordAuthentication" - ] - assert read_json(datadir / "root_and_admin_have_keys.json")["ssh"]["rootKeys"] == [ - "ssh-ed25519 KEY test@pc" - ] - assert read_json(datadir / "root_and_admin_have_keys.json")["sshKeys"] == [ - "ssh-rsa KEY test@pc" - ] - return datadir - - -@pytest.fixture -def some_users(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "some_users.json") - assert "users" in read_json(datadir / "some_users.json") - assert read_json(datadir / "some_users.json")["users"] == [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": ["ssh-rsa KEY user1@pc"], - }, - {"username": "user2", "hashedPassword": "HASHED_PASSWORD_2", "sshKeys": []}, - {"username": "user3", "hashedPassword": "HASHED_PASSWORD_3"}, - ] - return datadir - - -## /ssh/keys/{user} ###################################################### - - -def test_add_key_on_undefined_users(authorized_client, undefined_settings): - response = authorized_client.post( - "/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user4@pc"} - ) - assert response.status_code == 404 - - -def test_delete_key_when_undefined_users(authorized_client, undefined_settings): - response = authorized_client.delete( - "/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user1@pc"} - ) - assert response.status_code == 404 diff --git a/tests/test_rest_endpoints/services/test_ssh/all_off.json b/tests/test_rest_endpoints/services/test_ssh/all_off.json deleted file mode 100644 index 051d364..0000000 --- a/tests/test_rest_endpoints/services/test_ssh/all_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - 
"token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": false, - "passwordAuthentication": false, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json deleted file mode 100644 index c1691ea..0000000 --- a/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, 
- "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/some_users.json b/tests/test_rest_endpoints/services/test_ssh/some_users.json deleted file mode 100644 index df6380a..0000000 --- a/tests/test_rest_endpoints/services/test_ssh/some_users.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] - }, - { - "username": "user2", - "hashedPassword": "HASHED_PASSWORD_2", - "sshKeys": [ - ] - }, - { - "username": "user3", - "hashedPassword": "HASHED_PASSWORD_3" - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/turned_off.json 
b/tests/test_rest_endpoints/services/test_ssh/turned_off.json deleted file mode 100644 index 3856c80..0000000 --- a/tests/test_rest_endpoints/services/test_ssh/turned_off.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": false, - "passwordAuthentication": true - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/turned_on.json b/tests/test_rest_endpoints/services/test_ssh/turned_on.json deleted file mode 100644 index e60c57f..0000000 --- a/tests/test_rest_endpoints/services/test_ssh/turned_on.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - 
"enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/undefined.json b/tests/test_rest_endpoints/services/test_ssh/undefined.json deleted file mode 100644 index 7c9af37..0000000 --- a/tests/test_rest_endpoints/services/test_ssh/undefined.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/undefined_values.json b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json deleted file mode 100644 index b7b03d3..0000000 --- a/tests/test_rest_endpoints/services/test_ssh/undefined_values.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - 
"nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": {}, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file From 2f25329c434396168b9b374dcbc91c10a1044cb5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 22 Dec 2023 11:31:56 +0000 Subject: [PATCH 216/246] refactor(backup): remove a redundant constant --- selfprivacy_api/backup/tasks.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index a948bff..6520c70 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -18,8 +18,6 @@ from selfprivacy_api.backup import Backups from selfprivacy_api.jobs import Jobs, JobStatus, Job -SNAPSHOT_CACHE_TTL_HOURS = 6 - SNAPSHOT_CACHE_TTL_HOURS = 6 @@ -35,9 +33,7 @@ def validate_datetime(dt: datetime) -> bool: # huey tasks need to return something @huey.task() -def start_backup( - service_id: str, reason: BackupReason = BackupReason.EXPLICIT -) -> bool: +def start_backup(service_id: str, reason: BackupReason = BackupReason.EXPLICIT) -> bool: """ The worker task that starts the backup process. 
""" From bcbe1ff50c3175994ea3182569f94aba6fb3f835 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 25 Dec 2023 13:49:36 +0000 Subject: [PATCH 217/246] refactor(dkim): do not use popen --- selfprivacy_api/utils/__init__.py | 43 +++++++------- tests/test_dkim.py | 95 ++++++------------------------- tests/test_graphql/test_system.py | 4 +- 3 files changed, 44 insertions(+), 98 deletions(-) diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 5263b89..08bc61f 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -13,6 +13,7 @@ USERDATA_FILE = "/etc/nixos/userdata/userdata.json" TOKENS_FILE = "/etc/nixos/userdata/tokens.json" JOBS_FILE = "/etc/nixos/userdata/jobs.json" DOMAIN_FILE = "/var/domain" +DKIM_DIR = "/var/dkim/" class UserDataFiles(Enum): @@ -167,27 +168,31 @@ def parse_date(date_str: str) -> datetime.datetime: raise ValueError("Invalid date string") +def parse_dkim(dkim: str) -> str: + # extract key from file + dkim = dkim.split("(")[1] + dkim = dkim.split(")")[0] + # replace all quotes with nothing + dkim = dkim.replace('"', "") + # trim whitespace, remove newlines and tabs + dkim = dkim.strip() + dkim = dkim.replace("\n", "") + dkim = dkim.replace("\t", "") + # remove all redundant spaces + dkim = " ".join(dkim.split()) + return dkim + + def get_dkim_key(domain: str, parse: bool = True) -> typing.Optional[str]: """Get DKIM key from /var/dkim/.selector.txt""" - if os.path.exists("/var/dkim/" + domain + ".selector.txt"): - # Is this really neccessary to use Popen here? 
- cat_process = subprocess.Popen( - ["cat", "/var/dkim/" + domain + ".selector.txt"], stdout=subprocess.PIPE - ) - dkim = cat_process.communicate()[0] - if parse: - # Extract key from file - dkim = dkim.split(b"(")[1] - dkim = dkim.split(b")")[0] - # Replace all quotes with nothing - dkim = dkim.replace(b'"', b"") - # Trim whitespace, remove newlines and tabs - dkim = dkim.strip() - dkim = dkim.replace(b"\n", b"") - dkim = dkim.replace(b"\t", b"") - # Remove all redundant spaces - dkim = b" ".join(dkim.split()) - return str(dkim, "utf-8") + + dkim_path = os.path.join(DKIM_DIR, domain + ".selector.txt") + if os.path.exists(dkim_path): + with open(dkim_path, encoding="utf-8") as dkim_file: + dkim = dkim_file.read() + if parse: + dkim = parse_dkim(dkim) + return dkim return None diff --git a/tests/test_dkim.py b/tests/test_dkim.py index c9662d0..949bb19 100644 --- a/tests/test_dkim.py +++ b/tests/test_dkim.py @@ -1,68 +1,30 @@ import pytest -import typing +import os from os import path -from unittest.mock import DEFAULT from tests.conftest import global_data_dir from selfprivacy_api.utils import get_dkim_key, get_domain -import selfprivacy_api.utils as utils ############################################################################### - -class ProcessMock: - """Mock subprocess.Popen""" - - def __init__(self, args, **kwargs): - self.args = args - self.kwargs = kwargs - - def communicate(): - return ( - b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector for test-domain.tld\n', - None, - ) - - -class NoFileMock(ProcessMock): - def communicate(): - return (b"", None) - - -def _path_exists_with_masked_paths(filepath, masked_paths: typing.List[str]): - if filepath in masked_paths: - return False - else: - # this will 
cause the mocker to return the standard path.exists output - # see https://docs.python.org/3/library/unittest.mock.html#unittest.mock.Mock.side_effect - return DEFAULT - - -def path_exists_func_but_with_masked_paths(masked_paths: typing.List[str]): - """ - Sometimes we do not want to pretend that no files exist at all, but that only specific files do not exist - This provides the needed path.exists function for some arbitrary list of masked paths - """ - return lambda x: _path_exists_with_masked_paths(x, masked_paths) +DKIM_FILE_CONTENT = b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector for test-domain.tld\n' @pytest.fixture -def mock_all_paths_exist(mocker): - mock = mocker.patch("os.path.exists", autospec=True, return_value=True) - return mock +def dkim_file(mocker, domain_file, tmpdir): + domain = get_domain() + assert domain is not None + assert domain != "" + filename = domain + ".selector.txt" + dkim_path = path.join(tmpdir, filename) -@pytest.fixture -def mock_subproccess_popen_dkimfile(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) - return mock + with open(dkim_path, "wb") as file: + file.write(DKIM_FILE_CONTENT) - -@pytest.fixture -def mock_subproccess_popen(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=NoFileMock) - return mock + mocker.patch("selfprivacy_api.utils.DKIM_DIR", tmpdir) + return dkim_path @pytest.fixture @@ -74,46 +36,25 @@ def domain_file(mocker): @pytest.fixture -def mock_no_dkim_file(mocker): - """ - Should have domain mocks - """ - domain = utils.get_domain() - # try: - # domain = get_domain() - # except Exception as e: - # domain = "" - - masked_files = ["/var/dkim/" + domain + ".selector.txt"] - mock = 
mocker.patch( - "os.path.exists", - side_effect=path_exists_func_but_with_masked_paths(masked_files), - ) - return mock +def no_dkim_file(dkim_file): + os.remove(dkim_file) + assert path.exists(dkim_file) is False + return dkim_file ############################################################################### -def test_get_dkim_key( - mock_subproccess_popen_dkimfile, mock_all_paths_exist, domain_file -): +def test_get_dkim_key(domain_file, dkim_file): """Test DKIM key""" dkim_key = get_dkim_key("test-domain.tld") assert ( dkim_key == "v=DKIM1; k=rsa; p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) - assert mock_subproccess_popen_dkimfile.call_args[0][0] == [ - "cat", - "/var/dkim/test-domain.tld.selector.txt", - ] -def test_no_dkim_key( - authorized_client, domain_file, mock_no_dkim_file, mock_subproccess_popen -): +def test_no_dkim_key(domain_file, no_dkim_file): """Test no DKIM key""" dkim_key = get_dkim_key("test-domain.tld") assert dkim_key is None - assert mock_subproccess_popen.called == False diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index b6b4362..c318fe7 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -6,7 +6,7 @@ import pytest from tests.common import generate_system_query, read_json from tests.test_graphql.common import assert_empty -from tests.test_dkim import mock_no_dkim_file +from tests.test_dkim import no_dkim_file, dkim_file @pytest.fixture @@ -338,7 +338,7 @@ def test_graphql_get_domain_no_dkim( domain_file, mock_get_ip4, mock_get_ip6, - mock_no_dkim_file, + no_dkim_file, turned_on, ): """Test no DKIM file situation gets properly handled""" From 4b2eda25f605330782d068d635b76fea7e3622dc Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 25 Dec 2023 14:38:59 +0000 Subject: [PATCH 218/246] 
test(service, backup): move dummy service fixtures to conftest.py --- tests/conftest.py | 64 +++++++++++++++++++++++++-- tests/test_backup.py | 2 - tests/test_block_device_utils.py | 1 - tests/test_common.py | 61 ------------------------- tests/test_graphql/test_api_backup.py | 1 - tests/test_graphql/test_services.py | 1 - tests/test_services.py | 2 - 7 files changed, 60 insertions(+), 72 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index f058997..fddd32f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,12 +3,19 @@ # pylint: disable=unused-argument import os import pytest -from os import path - -from fastapi.testclient import TestClient -import os.path as path import datetime +from os import path +from os import makedirs +from typing import Generator +from fastapi.testclient import TestClient + +from selfprivacy_api.utils.huey import huey + +import selfprivacy_api.services as services +from selfprivacy_api.services import get_service_by_id, Service +from selfprivacy_api.services.test_service import DummyService + from selfprivacy_api.models.tokens.token import Token from selfprivacy_api.repositories.tokens.json_tokens_repository import ( JsonTokensRepository, @@ -19,6 +26,9 @@ from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( from tests.common import read_json +TESTFILE_BODY = "testytest!" +TESTFILE_2_BODY = "testissimo!" 
+ EMPTY_TOKENS_JSON = ' {"tokens": []}' @@ -147,3 +157,49 @@ def wrong_auth_client(tokens_file, huey_database, jobs_file): client = TestClient(app) client.headers.update({"Authorization": "Bearer WRONG_TOKEN"}) return client + + +@pytest.fixture() +def raw_dummy_service(tmpdir): + dirnames = ["test_service", "also_test_service"] + service_dirs = [] + for d in dirnames: + service_dir = path.join(tmpdir, d) + makedirs(service_dir) + service_dirs.append(service_dir) + + testfile_path_1 = path.join(service_dirs[0], "testfile.txt") + with open(testfile_path_1, "w") as file: + file.write(TESTFILE_BODY) + + testfile_path_2 = path.join(service_dirs[1], "testfile2.txt") + with open(testfile_path_2, "w") as file: + file.write(TESTFILE_2_BODY) + + # we need this to not change get_folders() much + class TestDummyService(DummyService, folders=service_dirs): + pass + + service = TestDummyService() + # assert pickle.dumps(service) is not None + return service + + +@pytest.fixture() +def dummy_service( + tmpdir, raw_dummy_service, generic_userdata +) -> Generator[Service, None, None]: + service = raw_dummy_service + + # register our service + services.services.append(service) + + huey.immediate = True + assert huey.immediate is True + + assert get_service_by_id(service.get_id()) is not None + service.enable() + yield service + + # cleanup because apparently it matters wrt tasks + services.services.remove(service) diff --git a/tests/test_backup.py b/tests/test_backup.py index bb9e217..036dd42 100644 --- a/tests/test_backup.py +++ b/tests/test_backup.py @@ -16,8 +16,6 @@ import tempfile from selfprivacy_api.utils.huey import huey -from tests.test_common import dummy_service, raw_dummy_service - from selfprivacy_api.services import Service, get_all_services from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import ServiceStatus diff --git a/tests/test_block_device_utils.py b/tests/test_block_device_utils.py index 0fa99f1..7a85c50 100644 --- 
a/tests/test_block_device_utils.py +++ b/tests/test_block_device_utils.py @@ -13,7 +13,6 @@ from selfprivacy_api.utils.block_devices import ( resize_block_device, ) from tests.common import read_json -from tests.test_common import dummy_service, raw_dummy_service SINGLE_LSBLK_OUTPUT = b""" { diff --git a/tests/test_common.py b/tests/test_common.py index 5c433a0..7dd3652 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,70 +1,9 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument -import json import os import pytest from selfprivacy_api.utils import WriteUserData, ReadUserData -from selfprivacy_api.utils.huey import huey - -from os import path -from os import makedirs -from typing import Generator - -# import pickle -import selfprivacy_api.services as services -from selfprivacy_api.services import get_service_by_id, Service -from selfprivacy_api.services.test_service import DummyService - - -TESTFILE_BODY = "testytest!" -TESTFILE_2_BODY = "testissimo!" 
- - -@pytest.fixture() -def raw_dummy_service(tmpdir): - dirnames = ["test_service", "also_test_service"] - service_dirs = [] - for d in dirnames: - service_dir = path.join(tmpdir, d) - makedirs(service_dir) - service_dirs.append(service_dir) - - testfile_path_1 = path.join(service_dirs[0], "testfile.txt") - with open(testfile_path_1, "w") as file: - file.write(TESTFILE_BODY) - - testfile_path_2 = path.join(service_dirs[1], "testfile2.txt") - with open(testfile_path_2, "w") as file: - file.write(TESTFILE_2_BODY) - - # we need this to not change get_folders() much - class TestDummyService(DummyService, folders=service_dirs): - pass - - service = TestDummyService() - # assert pickle.dumps(service) is not None - return service - - -@pytest.fixture() -def dummy_service( - tmpdir, raw_dummy_service, generic_userdata -) -> Generator[Service, None, None]: - service = raw_dummy_service - - # register our service - services.services.append(service) - - huey.immediate = True - assert huey.immediate is True - - assert get_service_by_id(service.get_id()) is not None - service.enable() - yield service - - # cleanup because apparently it matters wrt tasks - services.services.remove(service) def test_get_api_version(authorized_client): diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index 675c1b8..18d5d15 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -1,6 +1,5 @@ from os import path from tests.test_backup import backups -from tests.test_common import raw_dummy_service, dummy_service from tests.common import generate_backup_query diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index bd3e373..1c1374a 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -8,7 +8,6 @@ from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import Service, ServiceStatus from 
selfprivacy_api.services.test_service import DummyService -from tests.test_common import raw_dummy_service, dummy_service from tests.common import generate_service_query from tests.test_graphql.common import assert_empty, assert_ok, get_data diff --git a/tests/test_services.py b/tests/test_services.py index 3addf05..09784e9 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -15,8 +15,6 @@ from selfprivacy_api.services.generic_service_mover import FolderMoveNames from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.services.service import Service, ServiceStatus, StoppedService -from tests.test_common import raw_dummy_service, dummy_service - def test_unimplemented_folders_raises(): with raises(NotImplementedError): From 6ade95bbf1a8ac55ce83b29399aa748326ac38d3 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 25 Dec 2023 16:36:40 +0000 Subject: [PATCH 219/246] test(service): refactor systemctl calltests --- tests/test_services_dkim.py | 67 ++++++++++++++----------------------- 1 file changed, 25 insertions(+), 42 deletions(-) diff --git a/tests/test_services_dkim.py b/tests/test_services_dkim.py index 02998c2..8b247e0 100644 --- a/tests/test_services_dkim.py +++ b/tests/test_services_dkim.py @@ -9,42 +9,24 @@ from selfprivacy_api.services.ocserv import Ocserv from selfprivacy_api.services.pleroma import Pleroma +def expected_status_call(service_name: str): + return ["systemctl", "show", service_name] + + def call_args_asserts(mocked_object): assert mocked_object.call_count == 7 - assert mocked_object.call_args_list[0][0][0] == [ - "systemctl", - "show", - "dovecot2.service", - ] - assert mocked_object.call_args_list[1][0][0] == [ - "systemctl", - "show", - "postfix.service", - ] - assert mocked_object.call_args_list[2][0][0] == [ - "systemctl", - "show", - "vaultwarden.service", - ] - assert mocked_object.call_args_list[3][0][0] == [ - "systemctl", - "show", - "gitea.service", - ] - assert 
mocked_object.call_args_list[4][0][0] == [ - "systemctl", - "show", - "phpfpm-nextcloud.service", - ] - assert mocked_object.call_args_list[5][0][0] == [ - "systemctl", - "show", - "ocserv.service", - ] - assert mocked_object.call_args_list[6][0][0] == [ - "systemctl", - "show", - "pleroma.service", + calls = [callargs[0][0] for callargs in mocked_object.call_args_list] + assert calls == [ + expected_status_call(service) + for service in [ + "dovecot2.service", + "postfix.service", + "vaultwarden.service", + "gitea.service", + "phpfpm-nextcloud.service", + "ocserv.service", + "pleroma.service", + ] ] @@ -74,7 +56,7 @@ SubState=exited @pytest.fixture -def mock_subproccess_popen(mocker): +def mock_popen_systemctl_service_ok(mocker): mock = mocker.patch( "subprocess.check_output", autospec=True, return_value=SUCCESSFUL_STATUS ) @@ -82,7 +64,7 @@ def mock_subproccess_popen(mocker): @pytest.fixture -def mock_broken_service(mocker): +def mock_popen_systemctl_service_not_ok(mocker): mock = mocker.patch( "subprocess.check_output", autospec=True, return_value=FAILED_STATUS ) @@ -91,21 +73,22 @@ def mock_broken_service(mocker): ############################################################################### -def test_dkim_key(authorized_client, mock_subproccess_popen): - assert MailServer.get_status() == ServiceStatus.ACTIVE + +def test_systemctl_ok(mock_popen_systemctl_service_ok): + assert MailServer.get_status() == ServiceStatus.ACTIVE assert Bitwarden.get_status() == ServiceStatus.ACTIVE assert Gitea.get_status() == ServiceStatus.ACTIVE assert Nextcloud.get_status() == ServiceStatus.ACTIVE assert Ocserv.get_status() == ServiceStatus.ACTIVE assert Pleroma.get_status() == ServiceStatus.ACTIVE - call_args_asserts(mock_subproccess_popen) + call_args_asserts(mock_popen_systemctl_service_ok) -def test_no_dkim_key(authorized_client, mock_broken_service): - assert MailServer.get_status() == ServiceStatus.FAILED +def 
test_systemctl_failed_service(mock_popen_systemctl_service_not_ok): + assert MailServer.get_status() == ServiceStatus.FAILED assert Bitwarden.get_status() == ServiceStatus.FAILED assert Gitea.get_status() == ServiceStatus.FAILED assert Nextcloud.get_status() == ServiceStatus.FAILED assert Ocserv.get_status() == ServiceStatus.FAILED assert Pleroma.get_status() == ServiceStatus.FAILED - call_args_asserts(mock_broken_service) + call_args_asserts(mock_popen_systemctl_service_not_ok) From fb15ef9388f09e6159eedaf27c7cda90ac49a889 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Mon, 25 Dec 2023 16:38:01 +0000 Subject: [PATCH 220/246] test(service): rename service systemctl calltests --- tests/{test_services_dkim.py => test_services_systemctl.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/{test_services_dkim.py => test_services_systemctl.py} (100%) diff --git a/tests/test_services_dkim.py b/tests/test_services_systemctl.py similarity index 100% rename from tests/test_services_dkim.py rename to tests/test_services_systemctl.py From adcdbfb3687d16076f38d57cfccca1ac5f20fee5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 27 Dec 2023 11:54:25 +0000 Subject: [PATCH 221/246] test(services): test mailserver dkim quirk --- tests/test_services.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tests/test_services.py b/tests/test_services.py index 09784e9..f3d6adc 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -9,12 +9,15 @@ from selfprivacy_api.utils.waitloop import wait_until_true from selfprivacy_api.services.bitwarden import Bitwarden from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.services.mailserver import MailServer from selfprivacy_api.services.owned_path import OwnedPath from selfprivacy_api.services.generic_service_mover import FolderMoveNames from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.services.service import Service, ServiceStatus, 
StoppedService +from tests.test_dkim import domain_file, dkim_file, no_dkim_file + def test_unimplemented_folders_raises(): with raises(NotImplementedError): @@ -145,3 +148,13 @@ def test_enabling_disabling_writes_json( dummy_service.disable() with ReadUserData() as data: assert data[dummy_service.get_id()]["enable"] is False + + +# more detailed testing of this is in test_graphql/test_system.py +def test_mailserver_with_dkim_returns_some_dns(dkim_file): + records = MailServer().get_dns_records() + assert len(records) > 0 + + +def test_mailserver_with_no_dkim_returns_no_dns(no_dkim_file): + assert MailServer().get_dns_records() == [] From e835173fea6209e3e5e21318eea1a51216ef5498 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 27 Dec 2023 11:58:07 +0000 Subject: [PATCH 222/246] fix(users): handle no admin case --- selfprivacy_api/actions/users.py | 2 +- tests/test_graphql/test_users.py | 27 ++++++++++++++++++++++++- tests/test_rest_endpoints/test_users.py | 9 --------- 3 files changed, 27 insertions(+), 11 deletions(-) diff --git a/selfprivacy_api/actions/users.py b/selfprivacy_api/actions/users.py index bfc1756..10ba29b 100644 --- a/selfprivacy_api/actions/users.py +++ b/selfprivacy_api/actions/users.py @@ -58,7 +58,7 @@ def get_users( ) for user in user_data["users"] ] - if not exclude_primary: + if not exclude_primary and "username" in user_data.keys(): users.append( UserDataUser( username=user_data["username"], diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index e397600..bef02c3 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -6,6 +6,7 @@ from tests.common import ( generate_users_query, read_json, ) +from selfprivacy_api.utils import WriteUserData from tests.test_graphql.common import assert_empty, assert_errorcode invalid_usernames = [ @@ -89,6 +90,15 @@ def undefined_settings(mocker, datadir): return datadir +@pytest.fixture +def no_users_no_admin_nobody(undefined_settings): + 
datadir = undefined_settings + with WriteUserData() as data: + del data["username"] + del data["sshKeys"] + return datadir + + class ProcessMock: """Mock subprocess.Popen""" @@ -170,7 +180,7 @@ def test_graphql_get_no_users(authorized_client, no_users, mock_subprocess_popen ] -def test_graphql_get_users_undefined(authorized_client, undefined_settings): +def test_graphql_get_users_undefined_but_admin(authorized_client, undefined_settings): response = authorized_client.post( "/graphql", json={ @@ -187,6 +197,21 @@ def test_graphql_get_users_undefined(authorized_client, undefined_settings): ] +def test_graphql_get_users_undefined_no_admin( + authorized_client, no_users_no_admin_nobody +): + response = authorized_client.post( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert len(response.json()["data"]["users"]["allUsers"]) == 0 + + API_GET_USERS = """ query TestUsers($username: String!) 
{ users { diff --git a/tests/test_rest_endpoints/test_users.py b/tests/test_rest_endpoints/test_users.py index c7c5f5b..16f494c 100644 --- a/tests/test_rest_endpoints/test_users.py +++ b/tests/test_rest_endpoints/test_users.py @@ -112,15 +112,6 @@ def mock_subprocess_popen(mocker): ## TESTS ###################################################### - -def test_get_undefined_users( - authorized_client, undefined_settings, mock_subprocess_popen -): - response = authorized_client.get("/users") - assert response.status_code == 200 - assert response.json() == [] - - # graphql tests still provide these fields even if with empty values From 2669e17c915e35061a6546797e30f74e030bc377 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 27 Dec 2023 12:43:58 +0000 Subject: [PATCH 223/246] test(users): test adding users with missing (not just empty) fields --- tests/test_graphql/test_users.py | 147 +++++++++--------------- tests/test_rest_endpoints/test_users.py | 22 ---- 2 files changed, 57 insertions(+), 112 deletions(-) diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index bef02c3..2a2dd7f 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -7,7 +7,12 @@ from tests.common import ( read_json, ) from selfprivacy_api.utils import WriteUserData -from tests.test_graphql.common import assert_empty, assert_errorcode +from tests.test_graphql.common import ( + assert_empty, + assert_errorcode, + assert_ok, + get_data, +) invalid_usernames = [ "messagebus", @@ -368,118 +373,80 @@ mutation createUser($user: UserMutationInput!) 
{ """ -def test_graphql_add_user_unauthorize(client, one_user, mock_subprocess_popen): - response = client.post( +def api_add_user_json(authorized_client, user_json: dict): + # lowlevel for deeper testing of edgecases + return authorized_client.post( "/graphql", json={ "query": API_CREATE_USERS_MUTATION, "variables": { - "user": { - "username": "user2", - "password": "12345678", - }, + "user": user_json, }, }, ) + + +def api_add_user(authorized_client, username, password): + response = api_add_user_json( + authorized_client, {"username": username, "password": password} + ) + output = get_data(response)["users"]["createUser"] + return output + + +def test_graphql_add_user_unauthorized(client, one_user, mock_subprocess_popen): + response = api_add_user_json(client, {"username": "user2", "password": "12345678"}) assert_empty(response) def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "user2", - "password": "12345678", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None + output = api_add_user(authorized_client, "user2", password="12345678") + assert_ok(output, code=201) - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 201 - assert response.json()["data"]["users"]["createUser"]["success"] is True - - assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user2" - assert response.json()["data"]["users"]["createUser"]["user"]["sshKeys"] == [] + assert output["user"]["username"] == "user2" + assert output["user"]["sshKeys"] == [] -def test_graphql_add_undefined_settings( +def test_graphql_add_user_when_undefined_settings( authorized_client, undefined_settings, mock_subprocess_popen ): - response = authorized_client.post( - "/graphql", 
- json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "user2", - "password": "12345678", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None + output = api_add_user(authorized_client, "user2", password="12345678") + assert_ok(output, code=201) - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 201 - assert response.json()["data"]["users"]["createUser"]["success"] is True - - assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user2" - assert response.json()["data"]["users"]["createUser"]["user"]["sshKeys"] == [] + assert output["user"]["username"] == "user2" + assert output["user"]["sshKeys"] == [] -def test_graphql_add_without_password( - authorized_client, one_user, mock_subprocess_popen -): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "user2", - "password": "", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 400 - assert response.json()["data"]["users"]["createUser"]["success"] is False - - assert response.json()["data"]["users"]["createUser"]["user"] is None +users_witn_empty_fields = [ + {"username": "user2", "password": ""}, + {"username": "", "password": "12345678"}, + {"username": "", "password": ""}, +] -def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "", - "password": "", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None 
+@pytest.mark.parametrize("user_json", users_witn_empty_fields) +def test_graphql_add_with_empty_fields(authorized_client, one_user, user_json): + response = api_add_user_json(authorized_client, user_json) + output = get_data(response)["users"]["createUser"] - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 400 - assert response.json()["data"]["users"]["createUser"]["success"] is False + assert_errorcode(output, 400) - assert response.json()["data"]["users"]["createUser"]["user"] is None + assert output["user"] is None + + +users_witn_undefined_fields = [ + {"username": "user2"}, + {"password": "12345678"}, + {}, +] + + +@pytest.mark.parametrize("user_json", users_witn_undefined_fields) +def test_graphql_add_with_undefined_fields(authorized_client, one_user, user_json): + # checking that all fields are mandatory + response = api_add_user_json(authorized_client, user_json) + assert response.json()["errors"] is not None + assert response.json()["errors"] != [] @pytest.mark.parametrize("username", invalid_usernames) diff --git a/tests/test_rest_endpoints/test_users.py b/tests/test_rest_endpoints/test_users.py index 16f494c..2c325b3 100644 --- a/tests/test_rest_endpoints/test_users.py +++ b/tests/test_rest_endpoints/test_users.py @@ -112,28 +112,6 @@ def mock_subprocess_popen(mocker): ## TESTS ###################################################### -# graphql tests still provide these fields even if with empty values - - -def test_post_without_username(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post("/users", json={"password": "password"}) - assert response.status_code == 422 - - -def test_post_without_password(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post("/users", json={"username": "user4"}) - assert response.status_code == 422 - - -def test_post_without_username_and_password( - 
authorized_client, one_user, mock_subprocess_popen -): - response = authorized_client.post("/users", json={}) - assert response.status_code == 422 - - -# end of BUT THERE ARE FIELDS! rant - # the final user is not in gql checks # I think maybe generate a bunch? @pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "№:%##$^&@$&^()_"]) From c470ec45e8cacb0e33481b414c859de0fd7926c5 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 27 Dec 2023 13:06:44 +0000 Subject: [PATCH 224/246] test(users): test invalid usernames (and delete it from rest) --- tests/test_graphql/test_users.py | 130 +++++------------------- tests/test_rest_endpoints/test_users.py | 13 --- 2 files changed, 23 insertions(+), 120 deletions(-) diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index 2a2dd7f..96ecb85 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -430,7 +430,6 @@ def test_graphql_add_with_empty_fields(authorized_client, one_user, user_json): output = get_data(response)["users"]["createUser"] assert_errorcode(output, 400) - assert output["user"] is None @@ -445,6 +444,7 @@ users_witn_undefined_fields = [ def test_graphql_add_with_undefined_fields(authorized_client, one_user, user_json): # checking that all fields are mandatory response = api_add_user_json(authorized_client, user_json) + assert response.json()["errors"] is not None assert response.json()["errors"] != [] @@ -453,130 +453,46 @@ def test_graphql_add_with_undefined_fields(authorized_client, one_user, user_jso def test_graphql_add_system_username( authorized_client, one_user, mock_subprocess_popen, username ): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": username, - "password": "12345678", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None + output = api_add_user(authorized_client, username, 
password="12345678") - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 409 - assert response.json()["data"]["users"]["createUser"]["success"] is False - - assert response.json()["data"]["users"]["createUser"]["user"] is None + assert_errorcode(output, code=409) + assert output["user"] is None -def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "user1", - "password": "12345678", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None +def test_graphql_add_existing_user(authorized_client, one_user): + output = api_add_user(authorized_client, "user1", password="12345678") - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 409 - assert response.json()["data"]["users"]["createUser"]["success"] is False - - assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user1" - assert ( - response.json()["data"]["users"]["createUser"]["user"]["sshKeys"][0] - == "ssh-rsa KEY user1@pc" - ) + assert_errorcode(output, code=409) + assert output["user"]["username"] == "user1" + assert output["user"]["sshKeys"][0] == "ssh-rsa KEY user1@pc" def test_graphql_add_main_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "tester", - "password": "12345678", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None + output = api_add_user(authorized_client, "tester", password="12345678") - assert response.json()["data"]["users"]["createUser"]["message"] is not None - 
assert response.json()["data"]["users"]["createUser"]["code"] == 409 - assert response.json()["data"]["users"]["createUser"]["success"] is False - - assert ( - response.json()["data"]["users"]["createUser"]["user"]["username"] == "tester" - ) - assert ( - response.json()["data"]["users"]["createUser"]["user"]["sshKeys"][0] - == "ssh-rsa KEY test@pc" - ) + assert_errorcode(output, code=409) + assert output["user"]["username"] == "tester" + assert output["user"]["sshKeys"][0] == "ssh-rsa KEY test@pc" def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "a" * 32, - "password": "12345678", - }, - }, - }, - ) - assert response.json().get("data") is not None + output = api_add_user(authorized_client, "a" * 32, password="12345678") - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 400 - assert response.json()["data"]["users"]["createUser"]["success"] is False - - assert response.json()["data"]["users"]["createUser"]["user"] is None + assert_errorcode(output, code=400) + assert output["user"] is None -@pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "^-^"]) +# TODO: maybe make a username generating function to make a more comprehensive invalid username test +@pytest.mark.parametrize( + "username", ["", "1", "фыр", "user1@", "^-^", "№:%##$^&@$&^()_"] +) def test_graphql_add_invalid_username( authorized_client, one_user, mock_subprocess_popen, username ): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": username, - "password": "12345678", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None + output = api_add_user(authorized_client, username, 
password="12345678") - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 400 - assert response.json()["data"]["users"]["createUser"]["success"] is False - - assert response.json()["data"]["users"]["createUser"]["user"] is None + assert_errorcode(output, code=400) + assert output["user"] is None API_DELETE_USER_MUTATION = """ diff --git a/tests/test_rest_endpoints/test_users.py b/tests/test_rest_endpoints/test_users.py index 2c325b3..89f4331 100644 --- a/tests/test_rest_endpoints/test_users.py +++ b/tests/test_rest_endpoints/test_users.py @@ -111,19 +111,6 @@ def mock_subprocess_popen(mocker): ## TESTS ###################################################### - -# the final user is not in gql checks -# I think maybe generate a bunch? -@pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "№:%##$^&@$&^()_"]) -def test_post_invalid_username( - authorized_client, one_user, mock_subprocess_popen, username -): - response = authorized_client.post( - "/users", json={"username": username, "password": "password"} - ) - assert response.status_code == 400 - - # gql counterpart is too weak def test_delete_user(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.delete("/users/user1") From 2e775dad90289f390b841d95d1ea25deacbc8fc9 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 27 Dec 2023 13:44:39 +0000 Subject: [PATCH 225/246] fix(users): handle no admin name defined when adding a user --- selfprivacy_api/actions/users.py | 10 ++++++++++ selfprivacy_api/graphql/mutations/users_mutations.py | 6 ++++++ tests/test_graphql/test_users.py | 11 ++++++++++- 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/actions/users.py b/selfprivacy_api/actions/users.py index 10ba29b..fafa84f 100644 --- a/selfprivacy_api/actions/users.py +++ b/selfprivacy_api/actions/users.py @@ -107,6 +107,12 @@ class PasswordIsEmpty(Exception): pass 
+class InvalidConfiguration(Exception): + """The userdata is broken""" + + pass + + def create_user(username: str, password: str): if password == "": raise PasswordIsEmpty("Password is empty") @@ -124,6 +130,10 @@ def create_user(username: str, password: str): with ReadUserData() as user_data: ensure_ssh_and_users_fields_exist(user_data) + if "username" not in user_data.keys(): + raise InvalidConfiguration( + "Broken config: Admin name is not defined. Consider recovery or add it manually" + ) if username == user_data["username"]: raise UserAlreadyExists("User already exists") if username in [user["username"] for user in user_data["users"]]: diff --git a/selfprivacy_api/graphql/mutations/users_mutations.py b/selfprivacy_api/graphql/mutations/users_mutations.py index 57825bc..7644b90 100644 --- a/selfprivacy_api/graphql/mutations/users_mutations.py +++ b/selfprivacy_api/graphql/mutations/users_mutations.py @@ -69,6 +69,12 @@ class UsersMutations: message=str(e), code=400, ) + except users_actions.InvalidConfiguration as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) except users_actions.UserAlreadyExists as e: return UserMutationReturn( success=False, diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index 96ecb85..f3ba02d 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -467,7 +467,7 @@ def test_graphql_add_existing_user(authorized_client, one_user): assert output["user"]["sshKeys"][0] == "ssh-rsa KEY user1@pc" -def test_graphql_add_main_user(authorized_client, one_user, mock_subprocess_popen): +def test_graphql_add_main_user(authorized_client, one_user): output = api_add_user(authorized_client, "tester", password="12345678") assert_errorcode(output, code=409) @@ -475,6 +475,15 @@ def test_graphql_add_main_user(authorized_client, one_user, mock_subprocess_pope assert output["user"]["sshKeys"][0] == "ssh-rsa KEY test@pc" +def 
test_graphql_add_user_when_no_admin_defined( + authorized_client, no_users_no_admin_nobody +): + output = api_add_user(authorized_client, "tester", password="12345678") + + assert_errorcode(output, code=400) + assert output["user"] is None + + def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_popen): output = api_add_user(authorized_client, "a" * 32, password="12345678") From e7c89e3e3fe6350a2d7497d1313eb748db75ed99 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 27 Dec 2023 14:31:50 +0000 Subject: [PATCH 226/246] test(users): delete a user and CHECK that it was deleted --- tests/test_graphql/test_users.py | 16 +++++++++++++++ tests/test_rest_endpoints/test_users.py | 10 ---------- tests/test_users.py | 26 +++++++++++++++++++++++++ 3 files changed, 42 insertions(+), 10 deletions(-) create mode 100644 tests/test_users.py diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index f3ba02d..99f5934 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -133,6 +133,17 @@ allUsers { """ +def api_all_users(authorized_client): + response = authorized_client.post( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + output = get_data(response)["users"]["allUsers"] + return output + + def test_graphql_get_users_unauthorized(client, some_users, mock_subprocess_popen): """Test wrong auth""" response = client.post( @@ -543,6 +554,11 @@ def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_pope assert response.json()["data"]["users"]["deleteUser"]["message"] is not None assert response.json()["data"]["users"]["deleteUser"]["success"] is True + new_users = api_all_users(authorized_client) + assert len(new_users) == 3 + usernames = [user["username"] for user in new_users] + assert set(usernames) == set(["user2", "user3", "tester"]) + @pytest.mark.parametrize("username", ["", "def"]) def test_graphql_delete_nonexistent_users( diff 
--git a/tests/test_rest_endpoints/test_users.py b/tests/test_rest_endpoints/test_users.py index 89f4331..6123568 100644 --- a/tests/test_rest_endpoints/test_users.py +++ b/tests/test_rest_endpoints/test_users.py @@ -111,16 +111,6 @@ def mock_subprocess_popen(mocker): ## TESTS ###################################################### -# gql counterpart is too weak -def test_delete_user(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.delete("/users/user1") - assert response.status_code == 200 - assert read_json(some_users / "some_users.json")["users"] == [ - {"username": "user2", "hashedPassword": "HASHED_PASSWORD_2", "sshKeys": []}, - {"username": "user3", "hashedPassword": "HASHED_PASSWORD_3"}, - ] - - def test_delete_main_user(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.delete("/users/tester") assert response.status_code == 400 diff --git a/tests/test_users.py b/tests/test_users.py new file mode 100644 index 0000000..2f613db --- /dev/null +++ b/tests/test_users.py @@ -0,0 +1,26 @@ +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.actions.users import delete_user +""" + A place for user storage tests and other user tests that are not Graphql-specific. +""" + +# yes it is an incomplete suite. +# It was born in order to not lose things that REST API tests checked for +# In the future, user storage tests that are not dependent on actual API (graphql or otherwise) go here. 
+ +def test_delete_user_writes_json(generic_userdata): + delete_user("user2") + with ReadUserData() as data: + assert data["users"] == [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": ["ssh-rsa KEY user1@pc"] + }, + { + "username": "user3", + "hashedPassword": "HASHED_PASSWORD_3", + "sshKeys": ["ssh-rsa KEY user3@pc"] + } + ] + From ab081f6fbcd248da79bd4d4bb04e092828c86040 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 27 Dec 2023 14:38:08 +0000 Subject: [PATCH 227/246] test(users): delete redundant rest admin deletion tests --- tests/test_rest_endpoints/test_users.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/test_rest_endpoints/test_users.py b/tests/test_rest_endpoints/test_users.py index 6123568..f660126 100644 --- a/tests/test_rest_endpoints/test_users.py +++ b/tests/test_rest_endpoints/test_users.py @@ -111,9 +111,6 @@ def mock_subprocess_popen(mocker): ## TESTS ###################################################### -def test_delete_main_user(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.delete("/users/tester") - assert response.status_code == 400 def test_delete_just_delete(authorized_client, some_users, mock_subprocess_popen): From dcf6dd9ac5e9d7d59fdc9db5f727f61781a42baa Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 27 Dec 2023 14:46:52 +0000 Subject: [PATCH 228/246] test(users): delete rest user tests --- tests/test_rest_endpoints/test_users.py | 118 ------------------------ 1 file changed, 118 deletions(-) delete mode 100644 tests/test_rest_endpoints/test_users.py diff --git a/tests/test_rest_endpoints/test_users.py b/tests/test_rest_endpoints/test_users.py deleted file mode 100644 index f660126..0000000 --- a/tests/test_rest_endpoints/test_users.py +++ /dev/null @@ -1,118 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r", 
encoding="utf-8") as file: - return json.load(file) - - -invalid_usernames = [ - "root", - "messagebus", - "postfix", - "polkituser", - "dovecot2", - "dovenull", - "nginx", - "postgres", - "systemd-journal-gateway", - "prosody", - "systemd-network", - "systemd-resolve", - "systemd-timesync", - "opendkim", - "rspamd", - "sshd", - "selfprivacy-api", - "restic", - "redis", - "pleroma", - "ocserv", - "nextcloud", - "memcached", - "knot-resolver", - "gitea", - "bitwarden_rs", - "vaultwarden", - "acme", - "virtualMail", - "nixbld1", - "nixbld2", - "nixbld29", - "nobody", -] - - -## FIXTURES ################################################### - - -@pytest.fixture -def no_users(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_users.json") - assert read_json(datadir / "no_users.json")["users"] == [] - return datadir - - -@pytest.fixture -def one_user(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "one_user.json") - assert read_json(datadir / "one_user.json")["users"] == [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": ["ssh-rsa KEY user1@pc"], - } - ] - return datadir - - -@pytest.fixture -def some_users(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "some_users.json") - assert read_json(datadir / "some_users.json")["users"] == [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": ["ssh-rsa KEY user1@pc"], - }, - {"username": "user2", "hashedPassword": "HASHED_PASSWORD_2", "sshKeys": []}, - {"username": "user3", "hashedPassword": "HASHED_PASSWORD_3"}, - ] - return datadir - - -@pytest.fixture -def undefined_settings(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "users" not in read_json(datadir / "undefined.json") - return datadir - - -class ProcessMock: - """Mock subprocess.Popen""" - - def __init__(self, args, **kwargs): - 
self.args = args - self.kwargs = kwargs - - def communicate(): - return (b"NEW_HASHED", None) - - returncode = 0 - - -@pytest.fixture -def mock_subprocess_popen(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) - return mock - - -## TESTS ###################################################### - - -def test_delete_just_delete(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.delete("/users") - assert response.status_code == 405 From 4a580e9b7b7eb28d994fdb06cc136214ad0a89e4 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Dec 2023 13:11:03 +0000 Subject: [PATCH 229/246] feature(system): better error handling for shell calls --- selfprivacy_api/actions/system.py | 54 +++++++++----- .../graphql/mutations/system_mutations.py | 70 +++++++++++++------ tests/test_graphql/test_system_nixos_tasks.py | 9 --- 3 files changed, 86 insertions(+), 47 deletions(-) diff --git a/selfprivacy_api/actions/system.py b/selfprivacy_api/actions/system.py index 853662f..f5e0dc0 100644 --- a/selfprivacy_api/actions/system.py +++ b/selfprivacy_api/actions/system.py @@ -2,7 +2,7 @@ import os import subprocess import pytz -from typing import Optional +from typing import Optional, List from pydantic import BaseModel from selfprivacy_api.utils import WriteUserData, ReadUserData @@ -58,36 +58,56 @@ def set_auto_upgrade_settings( user_data["autoUpgrade"]["allowReboot"] = allowReboot +class ShellException(Exception): + """Something went wrong when calling another process""" + + pass + + +def run_blocking(cmd: List[str], new_session: bool = False) -> str: + """Run a process, block until done, return output, complain if failed""" + process_handle = subprocess.Popen( + cmd, + shell=False, + start_new_session=new_session, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + stdout_raw, stderr_raw = process_handle.communicate() + stdout = stdout_raw.decode("utf-8") + if stderr_raw is not None: + stderr = 
stderr_raw.decode("utf-8") + else: + stderr = "" + output = stdout + "\n" + stderr + if process_handle.returncode != 0: + raise ShellException( + f"Shell command failed, command array: {cmd}, output: {output}" + ) + return stdout + + def rebuild_system() -> int: """Rebuild the system""" - rebuild_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True - ) - rebuild_result.communicate()[0] - return rebuild_result.returncode + run_blocking(["systemctl", "start", "sp-nixos-rebuild.service"], new_session=True) + return 0 def rollback_system() -> int: """Rollback the system""" - rollback_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True - ) - rollback_result.communicate()[0] - return rollback_result.returncode + run_blocking(["systemctl", "start", "sp-nixos-rollback.service"], new_session=True) + return 0 def upgrade_system() -> int: """Upgrade the system""" - upgrade_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True - ) - upgrade_result.communicate()[0] - return upgrade_result.returncode + run_blocking(["systemctl", "start", "sp-nixos-upgrade.service"], new_session=True) + return 0 def reboot_system() -> None: """Reboot the system""" - subprocess.Popen(["reboot"], start_new_session=True) + run_blocking(["reboot"], new_session=True) def get_system_version() -> str: diff --git a/selfprivacy_api/graphql/mutations/system_mutations.py b/selfprivacy_api/graphql/mutations/system_mutations.py index b0cdae8..13ac16b 100644 --- a/selfprivacy_api/graphql/mutations/system_mutations.py +++ b/selfprivacy_api/graphql/mutations/system_mutations.py @@ -115,39 +115,67 @@ class SystemMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_rebuild(self) -> GenericMutationReturn: - system_actions.rebuild_system() - return GenericMutationReturn( - success=True, - message="Starting rebuild system", - 
code=200, - ) + try: + system_actions.rebuild_system() + return GenericMutationReturn( + success=True, + message="Starting rebuild system", + code=200, + ) + except system_actions.ShellException as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=500, + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_rollback(self) -> GenericMutationReturn: system_actions.rollback_system() - return GenericMutationReturn( - success=True, - message="Starting rebuild system", - code=200, - ) + try: + return GenericMutationReturn( + success=True, + message="Starting rebuild system", + code=200, + ) + except system_actions.ShellException as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=500, + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_upgrade(self) -> GenericMutationReturn: system_actions.upgrade_system() - return GenericMutationReturn( - success=True, - message="Starting rebuild system", - code=200, - ) + try: + return GenericMutationReturn( + success=True, + message="Starting rebuild system", + code=200, + ) + except system_actions.ShellException as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=500, + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def reboot_system(self) -> GenericMutationReturn: system_actions.reboot_system() - return GenericMutationReturn( - success=True, - message="System reboot has started", - code=200, - ) + try: + return GenericMutationReturn( + success=True, + message="System reboot has started", + code=200, + ) + except system_actions.ShellException as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=500, + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def pull_repository_changes(self) -> GenericMutationReturn: diff --git a/tests/test_graphql/test_system_nixos_tasks.py b/tests/test_graphql/test_system_nixos_tasks.py index b292fda..6052e9f 100644 --- 
a/tests/test_graphql/test_system_nixos_tasks.py +++ b/tests/test_graphql/test_system_nixos_tasks.py @@ -23,15 +23,6 @@ class ProcessMock: returncode = 0 -class BrokenServiceMock(ProcessMock): - """Mock subprocess.Popen for broken service""" - - def communicate(): # pylint: disable=no-method-argument - return (b"Testing error", None) - - returncode = 3 - - @pytest.fixture def mock_subprocess_popen(mocker): mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) From 46cc3171abb76acbef867cbb048c5445a8488404 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Dec 2023 13:12:32 +0000 Subject: [PATCH 230/246] test(system): test generic shell calls --- tests/test_system.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 tests/test_system.py diff --git a/tests/test_system.py b/tests/test_system.py new file mode 100644 index 0000000..549692e --- /dev/null +++ b/tests/test_system.py @@ -0,0 +1,22 @@ +import pytest +from selfprivacy_api.actions.system import run_blocking, ShellException + +# uname is just an arbitrary command expected to be everywhere we care + + +def test_uname(): + output = run_blocking(["uname"]) + assert output is not None + + +def test_uname_new_session(): + output = run_blocking(["uname"], new_session=True) + assert output is not None + + +def test_uname_nonexistent_args(): + with pytest.raises(ShellException) as exception_info: + # uname: extra operand ‘sldfkjsljf’ + # Try 'uname --help' for more information + run_blocking(["uname", "isdyfhishfaisljhkeysmash"], new_session=True) + assert "extra operand" in exception_info.value.args[0] From d96739c9ae8c9463684f1a0a9dab91e2d7d15f8a Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Dec 2023 13:13:08 +0000 Subject: [PATCH 231/246] test(system): remove rest system tests --- tests/test_rest_endpoints/data/jobs.json | 1 - tests/test_rest_endpoints/test_system.py | 167 ------------------ tests/test_rest_endpoints/test_system/domain | 1 - 
.../test_system/no_values.json | 55 ------ .../test_system/turned_off.json | 57 ------ .../test_system/turned_on.json | 57 ------ .../test_system/undefined.json | 52 ------ .../test_users/no_users.json | 59 ------- .../test_users/one_user.json | 66 ------- .../test_users/some_users.json | 76 -------- .../test_users/undefined.json | 57 ------ 11 files changed, 648 deletions(-) delete mode 100644 tests/test_rest_endpoints/data/jobs.json delete mode 100644 tests/test_rest_endpoints/test_system.py delete mode 100644 tests/test_rest_endpoints/test_system/domain delete mode 100644 tests/test_rest_endpoints/test_system/no_values.json delete mode 100644 tests/test_rest_endpoints/test_system/turned_off.json delete mode 100644 tests/test_rest_endpoints/test_system/turned_on.json delete mode 100644 tests/test_rest_endpoints/test_system/undefined.json delete mode 100644 tests/test_rest_endpoints/test_users/no_users.json delete mode 100644 tests/test_rest_endpoints/test_users/one_user.json delete mode 100644 tests/test_rest_endpoints/test_users/some_users.json delete mode 100644 tests/test_rest_endpoints/test_users/undefined.json diff --git a/tests/test_rest_endpoints/data/jobs.json b/tests/test_rest_endpoints/data/jobs.json deleted file mode 100644 index 0967ef4..0000000 --- a/tests/test_rest_endpoints/data/jobs.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/test_rest_endpoints/test_system.py b/tests/test_rest_endpoints/test_system.py deleted file mode 100644 index f2b20db..0000000 --- a/tests/test_rest_endpoints/test_system.py +++ /dev/null @@ -1,167 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=missing-function-docstring - -import json -import os -import pytest -from selfprivacy_api.utils import get_domain - - -def read_json(file_path): - with open(file_path, "r", encoding="utf-8") as file: - return json.load(file) - - -@pytest.fixture -def domain_file(mocker, datadir): - 
mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", datadir / "domain") - return datadir - - -@pytest.fixture -def turned_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_on.json")["autoUpgrade"]["enable"] == True - assert read_json(datadir / "turned_on.json")["autoUpgrade"]["allowReboot"] == True - assert read_json(datadir / "turned_on.json")["timezone"] == "Europe/Moscow" - return datadir - - -@pytest.fixture -def turned_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert read_json(datadir / "turned_off.json")["autoUpgrade"]["enable"] == False - assert read_json(datadir / "turned_off.json")["autoUpgrade"]["allowReboot"] == False - assert read_json(datadir / "turned_off.json")["timezone"] == "Europe/Moscow" - return datadir - - -@pytest.fixture -def undefined_config(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "autoUpgrade" not in read_json(datadir / "undefined.json") - assert "timezone" not in read_json(datadir / "undefined.json") - return datadir - - -@pytest.fixture -def no_values(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_values.json") - assert "enable" not in read_json(datadir / "no_values.json")["autoUpgrade"] - assert "allowReboot" not in read_json(datadir / "no_values.json")["autoUpgrade"] - return datadir - - -class ProcessMock: - """Mock subprocess.Popen""" - - def __init__(self, args, **kwargs): - self.args = args - self.kwargs = kwargs - - def communicate(): - return (b"", None) - - returncode = 0 - - -class BrokenServiceMock(ProcessMock): - """Mock subprocess.Popen""" - - def communicate(): - return (b"Testing error", None) - - returncode = 3 - - -@pytest.fixture -def mock_subprocess_popen(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) - 
return mock - - -@pytest.fixture -def mock_os_chdir(mocker): - mock = mocker.patch("os.chdir", autospec=True) - return mock - - -@pytest.fixture -def mock_broken_service(mocker): - mock = mocker.patch( - "subprocess.Popen", autospec=True, return_value=BrokenServiceMock - ) - return mock - - -@pytest.fixture -def mock_subprocess_check_output(mocker): - mock = mocker.patch( - "subprocess.check_output", autospec=True, return_value=b"Testing Linux" - ) - return mock - - -def test_system_rebuild_unauthorized(client, mock_subprocess_popen): - response = client.get("/system/configuration/apply") - assert response.status_code == 401 - assert mock_subprocess_popen.call_count == 0 - - -def test_system_rebuild(authorized_client, mock_subprocess_popen): - response = authorized_client.get("/system/configuration/apply") - assert response.status_code == 200 - assert mock_subprocess_popen.call_count == 1 - assert mock_subprocess_popen.call_args[0][0] == [ - "systemctl", - "start", - "sp-nixos-rebuild.service", - ] - - -def test_system_upgrade_unauthorized(client, mock_subprocess_popen): - response = client.get("/system/configuration/upgrade") - assert response.status_code == 401 - assert mock_subprocess_popen.call_count == 0 - - -def test_system_upgrade(authorized_client, mock_subprocess_popen): - response = authorized_client.get("/system/configuration/upgrade") - assert response.status_code == 200 - assert mock_subprocess_popen.call_count == 1 - assert mock_subprocess_popen.call_args[0][0] == [ - "systemctl", - "start", - "sp-nixos-upgrade.service", - ] - - -def test_system_rollback_unauthorized(client, mock_subprocess_popen): - response = client.get("/system/configuration/rollback") - assert response.status_code == 401 - assert mock_subprocess_popen.call_count == 0 - - -def test_system_rollback(authorized_client, mock_subprocess_popen): - response = authorized_client.get("/system/configuration/rollback") - assert response.status_code == 200 - assert 
mock_subprocess_popen.call_count == 1 - assert mock_subprocess_popen.call_args[0][0] == [ - "systemctl", - "start", - "sp-nixos-rollback.service", - ] - - -def test_reboot_system_unauthorized(client, mock_subprocess_popen): - response = client.get("/system/reboot") - assert response.status_code == 401 - assert mock_subprocess_popen.call_count == 0 - - -def test_reboot_system(authorized_client, mock_subprocess_popen): - response = authorized_client.get("/system/reboot") - assert response.status_code == 200 - assert mock_subprocess_popen.call_count == 1 - assert mock_subprocess_popen.call_args[0][0] == ["reboot"] diff --git a/tests/test_rest_endpoints/test_system/domain b/tests/test_rest_endpoints/test_system/domain deleted file mode 100644 index 3679d0d..0000000 --- a/tests/test_rest_endpoints/test_system/domain +++ /dev/null @@ -1 +0,0 @@ -test-domain.tld \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/no_values.json b/tests/test_rest_endpoints/test_system/no_values.json deleted file mode 100644 index 5c1431e..0000000 --- a/tests/test_rest_endpoints/test_system/no_values.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": true - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - 
"provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/turned_off.json b/tests/test_rest_endpoints/test_system/turned_off.json deleted file mode 100644 index 2336f36..0000000 --- a/tests/test_rest_endpoints/test_system/turned_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": true - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": false, - "allowReboot": false - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/turned_on.json b/tests/test_rest_endpoints/test_system/turned_on.json deleted file mode 100644 index 42999d8..0000000 --- a/tests/test_rest_endpoints/test_system/turned_on.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": true - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - 
}, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/undefined.json b/tests/test_rest_endpoints/test_system/undefined.json deleted file mode 100644 index 6b9f3fd..0000000 --- a/tests/test_rest_endpoints/test_system/undefined.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": true - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/no_users.json b/tests/test_rest_endpoints/test_users/no_users.json deleted file mode 100644 index 
5929a79..0000000 --- a/tests/test_rest_endpoints/test_users/no_users.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/one_user.json b/tests/test_rest_endpoints/test_users/one_user.json deleted file mode 100644 index 6c553bc..0000000 --- a/tests/test_rest_endpoints/test_users/one_user.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true 
- }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/some_users.json b/tests/test_rest_endpoints/test_users/some_users.json deleted file mode 100644 index df6380a..0000000 --- a/tests/test_rest_endpoints/test_users/some_users.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] - }, - { - "username": "user2", - "hashedPassword": "HASHED_PASSWORD_2", - "sshKeys": [ - ] - }, - { - "username": "user3", - "hashedPassword": "HASHED_PASSWORD_3" - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - 
"accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/undefined.json b/tests/test_rest_endpoints/test_users/undefined.json deleted file mode 100644 index c1691ea..0000000 --- a/tests/test_rest_endpoints/test_users/undefined.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file From 43d9d47aed76eb1ed2f5ba48f9041943a66e147b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Dec 2023 13:34:06 +0000 Subject: [PATCH 232/246] feature(system): remove rest system code --- selfprivacy_api/app.py | 2 - selfprivacy_api/rest/system.py | 105 --------------------------------- 2 files changed, 107 deletions(-) delete mode 100644 selfprivacy_api/rest/system.py diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index a58301a..68cd814 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -11,7 +11,6 @@ from selfprivacy_api.graphql.schema import schema from selfprivacy_api.migrations 
import run_migrations from selfprivacy_api.rest import ( - system, users, api_auth, services, @@ -32,7 +31,6 @@ app.add_middleware( ) -app.include_router(system.router) app.include_router(users.router) app.include_router(api_auth.router) app.include_router(services.router) diff --git a/selfprivacy_api/rest/system.py b/selfprivacy_api/rest/system.py deleted file mode 100644 index 9933fb3..0000000 --- a/selfprivacy_api/rest/system.py +++ /dev/null @@ -1,105 +0,0 @@ -from typing import Optional -from fastapi import APIRouter, Body, Depends, HTTPException -from pydantic import BaseModel - -from selfprivacy_api.dependencies import get_token_header - -import selfprivacy_api.actions.system as system_actions - -router = APIRouter( - prefix="/system", - tags=["system"], - dependencies=[Depends(get_token_header)], - responses={404: {"description": "Not found"}}, -) - - -@router.get("/configuration/timezone") -async def get_timezone(): - """Get the timezone of the server""" - return system_actions.get_timezone() - - -class ChangeTimezoneRequestBody(BaseModel): - """Change the timezone of the server""" - - timezone: str - - -@router.put("/configuration/timezone") -async def change_timezone(timezone: ChangeTimezoneRequestBody): - """Change the timezone of the server""" - try: - system_actions.change_timezone(timezone.timezone) - except system_actions.InvalidTimezone as e: - raise HTTPException(status_code=400, detail=str(e)) - return {"timezone": timezone.timezone} - - -@router.get("/configuration/autoUpgrade") -async def get_auto_upgrade_settings(): - """Get the auto-upgrade settings""" - return system_actions.get_auto_upgrade_settings().dict() - - -class AutoUpgradeSettings(BaseModel): - """Settings for auto-upgrading user data""" - - enable: Optional[bool] = None - allowReboot: Optional[bool] = None - - -@router.put("/configuration/autoUpgrade") -async def set_auto_upgrade_settings(settings: AutoUpgradeSettings): - """Set the auto-upgrade settings""" - 
system_actions.set_auto_upgrade_settings(settings.enable, settings.allowReboot) - return "Auto-upgrade settings changed" - - -@router.get("/configuration/apply") -async def apply_configuration(): - """Apply the configuration""" - return_code = system_actions.rebuild_system() - return return_code - - -@router.get("/configuration/rollback") -async def rollback_configuration(): - """Rollback the configuration""" - return_code = system_actions.rollback_system() - return return_code - - -@router.get("/configuration/upgrade") -async def upgrade_configuration(): - """Upgrade the configuration""" - return_code = system_actions.upgrade_system() - return return_code - - -@router.get("/reboot") -async def reboot_system(): - """Reboot the system""" - system_actions.reboot_system() - return "System reboot has started" - - -@router.get("/version") -async def get_system_version(): - """Get the system version""" - return {"system_version": system_actions.get_system_version()} - - -@router.get("/pythonVersion") -async def get_python_version(): - """Get the Python version""" - return system_actions.get_python_version() - - -@router.get("/configuration/pull") -async def pull_configuration(): - """Pull the configuration""" - action_result = system_actions.pull_repository_changes() - if action_result.status == 0: - return action_result.dict() - raise HTTPException(status_code=500, detail=action_result.dict()) From 41cd876f576009fb04ac9142a56acc93c1b09582 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Dec 2023 13:42:23 +0000 Subject: [PATCH 233/246] feature(users): remove rest users code --- selfprivacy_api/app.py | 2 -- selfprivacy_api/rest/users.py | 62 ----------------------------------- 2 files changed, 64 deletions(-) delete mode 100644 selfprivacy_api/rest/users.py diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 68cd814..be28e29 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -11,7 +11,6 @@ from selfprivacy_api.graphql.schema import 
schema from selfprivacy_api.migrations import run_migrations from selfprivacy_api.rest import ( - users, api_auth, services, ) @@ -31,7 +30,6 @@ app.add_middleware( ) -app.include_router(users.router) app.include_router(api_auth.router) app.include_router(services.router) app.include_router(graphql_app, prefix="/graphql") diff --git a/selfprivacy_api/rest/users.py b/selfprivacy_api/rest/users.py deleted file mode 100644 index ab4c6c9..0000000 --- a/selfprivacy_api/rest/users.py +++ /dev/null @@ -1,62 +0,0 @@ -"""Users management module""" -from typing import Optional -from fastapi import APIRouter, Body, Depends, HTTPException -from pydantic import BaseModel - -import selfprivacy_api.actions.users as users_actions - -from selfprivacy_api.dependencies import get_token_header - -router = APIRouter( - prefix="/users", - tags=["users"], - dependencies=[Depends(get_token_header)], - responses={404: {"description": "Not found"}}, -) - - -@router.get("") -async def get_users(withMainUser: bool = False): - """Get the list of users""" - users: list[users_actions.UserDataUser] = users_actions.get_users( - exclude_primary=not withMainUser, exclude_root=True - ) - - return [user.username for user in users] - - -class UserInput(BaseModel): - """User input""" - - username: str - password: str - - -@router.post("", status_code=201) -async def create_user(user: UserInput): - try: - users_actions.create_user(user.username, user.password) - except users_actions.PasswordIsEmpty as e: - raise HTTPException(status_code=400, detail=str(e)) - except users_actions.UsernameForbidden as e: - raise HTTPException(status_code=409, detail=str(e)) - except users_actions.UsernameNotAlphanumeric as e: - raise HTTPException(status_code=400, detail=str(e)) - except users_actions.UsernameTooLong as e: - raise HTTPException(status_code=400, detail=str(e)) - except users_actions.UserAlreadyExists as e: - raise HTTPException(status_code=409, detail=str(e)) - - return {"result": 0, "username": 
user.username} - - -@router.delete("/{username}") -async def delete_user(username: str): - try: - users_actions.delete_user(username) - except users_actions.UserNotFound as e: - raise HTTPException(status_code=404, detail=str(e)) - except users_actions.UserIsProtected as e: - raise HTTPException(status_code=400, detail=str(e)) - - return {"result": 0, "username": username} From 3e1fbdd4aa8298ae97308bb8db36de06a83e5c94 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Dec 2023 14:28:33 +0000 Subject: [PATCH 234/246] feature(services): remove rest services code --- selfprivacy_api/app.py | 2 - selfprivacy_api/rest/services.py | 336 ------------------------------- 2 files changed, 338 deletions(-) delete mode 100644 selfprivacy_api/rest/services.py diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index be28e29..913305c 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -12,7 +12,6 @@ from selfprivacy_api.migrations import run_migrations from selfprivacy_api.rest import ( api_auth, - services, ) app = FastAPI() @@ -31,7 +30,6 @@ app.add_middleware( app.include_router(api_auth.router) -app.include_router(services.router) app.include_router(graphql_app, prefix="/graphql") diff --git a/selfprivacy_api/rest/services.py b/selfprivacy_api/rest/services.py deleted file mode 100644 index c6dc12e..0000000 --- a/selfprivacy_api/rest/services.py +++ /dev/null @@ -1,336 +0,0 @@ -"""Basic services legacy api""" -import base64 -from typing import Optional -from fastapi import APIRouter, Depends, HTTPException -from pydantic import BaseModel -from selfprivacy_api.actions.ssh import ( - InvalidPublicKey, - KeyAlreadyExists, - KeyNotFound, - create_ssh_key, - enable_ssh, - get_ssh_settings, - remove_ssh_key, - set_ssh_settings, -) -from selfprivacy_api.actions.users import UserNotFound, get_user_by_username - -from selfprivacy_api.dependencies import get_token_header -from selfprivacy_api.services.bitwarden import Bitwarden -from 
selfprivacy_api.services.gitea import Gitea -from selfprivacy_api.services.mailserver import MailServer -from selfprivacy_api.services.nextcloud import Nextcloud -from selfprivacy_api.services.ocserv import Ocserv -from selfprivacy_api.services.pleroma import Pleroma -from selfprivacy_api.services.service import ServiceStatus -from selfprivacy_api.utils import get_dkim_key, get_domain - -router = APIRouter( - prefix="/services", - tags=["services"], - dependencies=[Depends(get_token_header)], - responses={404: {"description": "Not found"}}, -) - - -def service_status_to_return_code(status: ServiceStatus): - """Converts service status object to return code for - compatibility with legacy api""" - if status == ServiceStatus.ACTIVE: - return 0 - elif status == ServiceStatus.FAILED: - return 1 - elif status == ServiceStatus.INACTIVE: - return 3 - elif status == ServiceStatus.OFF: - return 4 - else: - return 2 - - -@router.get("/status") -async def get_status(): - """Get the status of the services""" - mail_status = MailServer.get_status() - bitwarden_status = Bitwarden.get_status() - gitea_status = Gitea.get_status() - nextcloud_status = Nextcloud.get_status() - ocserv_stauts = Ocserv.get_status() - pleroma_status = Pleroma.get_status() - - return { - "imap": service_status_to_return_code(mail_status), - "smtp": service_status_to_return_code(mail_status), - "http": 0, - "bitwarden": service_status_to_return_code(bitwarden_status), - "gitea": service_status_to_return_code(gitea_status), - "nextcloud": service_status_to_return_code(nextcloud_status), - "ocserv": service_status_to_return_code(ocserv_stauts), - "pleroma": service_status_to_return_code(pleroma_status), - } - - -@router.post("/bitwarden/enable") -async def enable_bitwarden(): - """Enable Bitwarden""" - Bitwarden.enable() - return { - "status": 0, - "message": "Bitwarden enabled", - } - - -@router.post("/bitwarden/disable") -async def disable_bitwarden(): - """Disable Bitwarden""" - Bitwarden.disable() - 
return { - "status": 0, - "message": "Bitwarden disabled", - } - - -@router.post("/gitea/enable") -async def enable_gitea(): - """Enable Gitea""" - Gitea.enable() - return { - "status": 0, - "message": "Gitea enabled", - } - - -@router.post("/gitea/disable") -async def disable_gitea(): - """Disable Gitea""" - Gitea.disable() - return { - "status": 0, - "message": "Gitea disabled", - } - - -@router.get("/mailserver/dkim") -async def get_mailserver_dkim(): - """Get the DKIM record for the mailserver""" - domain = get_domain() - - dkim = get_dkim_key(domain, parse=False) - if dkim is None: - raise HTTPException(status_code=404, detail="DKIM record not found") - dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8") - return dkim - - -@router.post("/nextcloud/enable") -async def enable_nextcloud(): - """Enable Nextcloud""" - Nextcloud.enable() - return { - "status": 0, - "message": "Nextcloud enabled", - } - - -@router.post("/nextcloud/disable") -async def disable_nextcloud(): - """Disable Nextcloud""" - Nextcloud.disable() - return { - "status": 0, - "message": "Nextcloud disabled", - } - - -@router.post("/ocserv/enable") -async def enable_ocserv(): - """Enable Ocserv""" - Ocserv.enable() - return { - "status": 0, - "message": "Ocserv enabled", - } - - -@router.post("/ocserv/disable") -async def disable_ocserv(): - """Disable Ocserv""" - Ocserv.disable() - return { - "status": 0, - "message": "Ocserv disabled", - } - - -@router.post("/pleroma/enable") -async def enable_pleroma(): - """Enable Pleroma""" - Pleroma.enable() - return { - "status": 0, - "message": "Pleroma enabled", - } - - -@router.post("/pleroma/disable") -async def disable_pleroma(): - """Disable Pleroma""" - Pleroma.disable() - return { - "status": 0, - "message": "Pleroma disabled", - } - - -@router.get("/restic/backup/list") -async def get_restic_backup_list(): - raise HTTPException( - status_code=410, - detail="This endpoint is deprecated, please use GraphQL API", - ) - - 
-@router.put("/restic/backup/create") -async def create_restic_backup(): - raise HTTPException( - status_code=410, - detail="This endpoint is deprecated, please use GraphQL API", - ) - - -@router.get("/restic/backup/status") -async def get_restic_backup_status(): - raise HTTPException( - status_code=410, - detail="This endpoint is deprecated, please use GraphQL API", - ) - - -@router.get("/restic/backup/reload") -async def reload_restic_backup(): - raise HTTPException( - status_code=410, - detail="This endpoint is deprecated, please use GraphQL API", - ) - - -class BackupRestoreInput(BaseModel): - backupId: str - - -@router.put("/restic/backup/restore") -async def restore_restic_backup(backup: BackupRestoreInput): - raise HTTPException( - status_code=410, - detail="This endpoint is deprecated, please use GraphQL API", - ) - - -class BackupConfigInput(BaseModel): - accountId: str - accountKey: str - bucket: str - - -@router.put("/restic/backblaze/config") -async def set_backblaze_config(backup_config: BackupConfigInput): - raise HTTPException( - status_code=410, - detail="This endpoint is deprecated, please use GraphQL API", - ) - - -@router.post("/ssh/enable") -async def rest_enable_ssh(): - """Enable SSH""" - enable_ssh() - return { - "status": 0, - "message": "SSH enabled", - } - - -@router.get("/ssh") -async def rest_get_ssh(): - """Get the SSH configuration""" - settings = get_ssh_settings() - return { - "enable": settings.enable, - "passwordAuthentication": settings.passwordAuthentication, - } - - -class SshConfigInput(BaseModel): - enable: Optional[bool] = None - passwordAuthentication: Optional[bool] = None - - -@router.put("/ssh") -async def rest_set_ssh(ssh_config: SshConfigInput): - """Set the SSH configuration""" - set_ssh_settings(ssh_config.enable, ssh_config.passwordAuthentication) - - return "SSH settings changed" - - -class SshKeyInput(BaseModel): - public_key: str - - -@router.put("/ssh/key/send", status_code=201) -async def 
rest_send_ssh_key(input: SshKeyInput): - """Send the SSH key""" - try: - create_ssh_key("root", input.public_key) - except KeyAlreadyExists as error: - raise HTTPException(status_code=409, detail="Key already exists") from error - except InvalidPublicKey as error: - raise HTTPException( - status_code=400, - detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", - ) from error - - return { - "status": 0, - "message": "SSH key sent", - } - - -@router.get("/ssh/keys/{username}") -async def rest_get_ssh_keys(username: str): - """Get the SSH keys for a user""" - user = get_user_by_username(username) - if user is None: - raise HTTPException(status_code=404, detail="User not found") - - return user.ssh_keys - - -@router.post("/ssh/keys/{username}", status_code=201) -async def rest_add_ssh_key(username: str, input: SshKeyInput): - try: - create_ssh_key(username, input.public_key) - except KeyAlreadyExists as error: - raise HTTPException(status_code=409, detail="Key already exists") from error - except InvalidPublicKey as error: - raise HTTPException( - status_code=400, - detail="Invalid key type. 
Only ssh-ed25519 and ssh-rsa are supported", - ) from error - except UserNotFound as error: - raise HTTPException(status_code=404, detail="User not found") from error - - return { - "message": "New SSH key successfully written", - } - - -@router.delete("/ssh/keys/{username}") -async def rest_delete_ssh_key(username: str, input: SshKeyInput): - try: - remove_ssh_key(username, input.public_key) - except KeyNotFound as error: - raise HTTPException(status_code=404, detail="Key not found") from error - except UserNotFound as error: - raise HTTPException(status_code=404, detail="User not found") from error - return {"message": "SSH key deleted"} From 02b10b5078b7abd4c02315c3862918f659f6a8f7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Dec 2023 15:01:21 +0000 Subject: [PATCH 235/246] feature(auth): remove rest auth code --- selfprivacy_api/app.py | 4 - selfprivacy_api/rest/api_auth.py | 125 ------------------------------- 2 files changed, 129 deletions(-) delete mode 100644 selfprivacy_api/rest/api_auth.py diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index 913305c..64ca85a 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -10,9 +10,6 @@ from selfprivacy_api.dependencies import get_api_version from selfprivacy_api.graphql.schema import schema from selfprivacy_api.migrations import run_migrations -from selfprivacy_api.rest import ( - api_auth, -) app = FastAPI() @@ -29,7 +26,6 @@ app.add_middleware( ) -app.include_router(api_auth.router) app.include_router(graphql_app, prefix="/graphql") diff --git a/selfprivacy_api/rest/api_auth.py b/selfprivacy_api/rest/api_auth.py deleted file mode 100644 index 275dac3..0000000 --- a/selfprivacy_api/rest/api_auth.py +++ /dev/null @@ -1,125 +0,0 @@ -from datetime import datetime -from typing import Optional -from fastapi import APIRouter, Depends, HTTPException -from pydantic import BaseModel -from selfprivacy_api.actions.api_tokens import ( - CannotDeleteCallerException, - 
InvalidExpirationDate, - InvalidUsesLeft, - NotFoundException, - delete_api_token, - refresh_api_token, - get_api_recovery_token_status, - get_api_tokens_with_caller_flag, - get_new_api_recovery_key, - use_mnemonic_recovery_token, - delete_new_device_auth_token, - get_new_device_auth_token, - use_new_device_auth_token, -) - -from selfprivacy_api.dependencies import TokenHeader, get_token_header - - -router = APIRouter( - prefix="/auth", - tags=["auth"], - responses={404: {"description": "Not found"}}, -) - - -@router.get("/tokens") -async def rest_get_tokens(auth_token: TokenHeader = Depends(get_token_header)): - """Get the tokens info""" - return get_api_tokens_with_caller_flag(auth_token.token) - - -class DeleteTokenInput(BaseModel): - """Delete token input""" - - token_name: str - - -@router.delete("/tokens") -async def rest_delete_tokens( - token: DeleteTokenInput, auth_token: TokenHeader = Depends(get_token_header) -): - """Delete the tokens""" - try: - delete_api_token(auth_token.token, token.token_name) - except NotFoundException: - raise HTTPException(status_code=404, detail="Token not found") - except CannotDeleteCallerException: - raise HTTPException(status_code=400, detail="Cannot delete caller's token") - return {"message": "Token deleted"} - - -@router.post("/tokens") -async def rest_refresh_token(auth_token: TokenHeader = Depends(get_token_header)): - """Refresh the token""" - try: - new_token = refresh_api_token(auth_token.token) - except NotFoundException: - raise HTTPException(status_code=404, detail="Token not found") - return {"token": new_token} - - -@router.get("/recovery_token") -async def rest_get_recovery_token_status( - auth_token: TokenHeader = Depends(get_token_header), -): - return get_api_recovery_token_status() - - -class CreateRecoveryTokenInput(BaseModel): - expiration: Optional[datetime] = None - uses: Optional[int] = None - - -@router.post("/recovery_token") -async def rest_create_recovery_token( - limits: CreateRecoveryTokenInput 
= CreateRecoveryTokenInput(), - auth_token: TokenHeader = Depends(get_token_header), -): - try: - token = get_new_api_recovery_key(limits.expiration, limits.uses) - except InvalidExpirationDate as e: - raise HTTPException(status_code=400, detail=str(e)) - except InvalidUsesLeft as e: - raise HTTPException(status_code=400, detail=str(e)) - return {"token": token} - - -class UseTokenInput(BaseModel): - token: str - device: str - - -@router.post("/recovery_token/use") -async def rest_use_recovery_token(input: UseTokenInput): - token = use_mnemonic_recovery_token(input.token, input.device) - if token is None: - raise HTTPException(status_code=404, detail="Token not found") - return {"token": token} - - -@router.post("/new_device") -async def rest_new_device(auth_token: TokenHeader = Depends(get_token_header)): - token = get_new_device_auth_token() - return {"token": token} - - -@router.delete("/new_device") -async def rest_delete_new_device_token( - auth_token: TokenHeader = Depends(get_token_header), -): - delete_new_device_auth_token() - return {"token": None} - - -@router.post("/new_device/authorize") -async def rest_new_device_authorize(input: UseTokenInput): - token = use_new_device_auth_token(input.token, input.device) - if token is None: - raise HTTPException(status_code=404, detail="Token not found") - return {"message": "Device authorized", "token": token} From 3080f5a18b1a22b4457fd1f029d419a3d98aa7da Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 29 Dec 2023 15:19:17 +0000 Subject: [PATCH 236/246] feature(rest): remove rest --- selfprivacy_api/rest/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 selfprivacy_api/rest/__init__.py diff --git a/selfprivacy_api/rest/__init__.py b/selfprivacy_api/rest/__init__.py deleted file mode 100644 index e69de29..0000000 From b8d02231cf65b813878c3cb3e313a0a922bcb9fc Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 3 Jan 2024 15:46:48 +0000 Subject: [PATCH 237/246] fix(services): 
handle the async nature of moving. --- .../graphql/mutations/services_mutations.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/mutations/services_mutations.py b/selfprivacy_api/graphql/mutations/services_mutations.py index ad3b1b9..9bacf66 100644 --- a/selfprivacy_api/graphql/mutations/services_mutations.py +++ b/selfprivacy_api/graphql/mutations/services_mutations.py @@ -160,6 +160,8 @@ class ServicesMutations: message="Service not found.", code=404, ) + # TODO: make serviceImmovable and BlockdeviceNotFound exceptions + # in the move_to_volume() function and handle them here if not service.is_movable(): return ServiceJobMutationReturn( success=False, @@ -176,7 +178,15 @@ class ServicesMutations: service=service_to_graphql_service(service), ) job = service.move_to_volume(volume) - if job.status == JobStatus.FINISHED: + if job.status in [JobStatus.CREATED, JobStatus.RUNNING]: + return ServiceJobMutationReturn( + success=True, + message="Started moving the service.", + code=200, + service=service_to_graphql_service(service), + job=job_to_api_job(job), + ) + elif job.status == JobStatus.FINISHED: return ServiceJobMutationReturn( success=True, message="Service moved.", From 8e551a8fe0d9dcb093c25c9814a52e80fb25ee2e Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 3 Jan 2024 17:47:13 +0000 Subject: [PATCH 238/246] refactor(services): use generic code for enabling and disabling --- .../services/bitwarden/__init__.py | 21 ------------------- .../services/nextcloud/__init__.py | 21 ------------------- selfprivacy_api/services/ocserv/__init__.py | 19 ----------------- selfprivacy_api/services/pleroma/__init__.py | 19 ----------------- 4 files changed, 80 deletions(-) diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 0d1dfdc..1590729 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ 
-58,11 +58,6 @@ class Bitwarden(Service): def get_backup_description() -> str: return "Password database, encryption certificate and attachments." - @staticmethod - def is_enabled() -> bool: - with ReadUserData() as user_data: - return user_data.get("bitwarden", {}).get("enable", False) - @staticmethod def get_status() -> ServiceStatus: """ @@ -76,22 +71,6 @@ class Bitwarden(Service): """ return get_service_status("vaultwarden.service") - @staticmethod - def enable(): - """Enable Bitwarden service.""" - with WriteUserData() as user_data: - if "bitwarden" not in user_data: - user_data["bitwarden"] = {} - user_data["bitwarden"]["enable"] = True - - @staticmethod - def disable(): - """Disable Bitwarden service.""" - with WriteUserData() as user_data: - if "bitwarden" not in user_data: - user_data["bitwarden"] = {} - user_data["bitwarden"]["enable"] = False - @staticmethod def stop(): subprocess.run(["systemctl", "stop", "vaultwarden.service"]) diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 1703478..0da6dd9 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -53,11 +53,6 @@ class Nextcloud(Service): def get_backup_description() -> str: return "All the files and other data stored in Nextcloud." 
- @staticmethod - def is_enabled() -> bool: - with ReadUserData() as user_data: - return user_data.get("nextcloud", {}).get("enable", False) - @staticmethod def get_status() -> ServiceStatus: """ @@ -71,22 +66,6 @@ class Nextcloud(Service): """ return get_service_status("phpfpm-nextcloud.service") - @staticmethod - def enable(): - """Enable Nextcloud service.""" - with WriteUserData() as user_data: - if "nextcloud" not in user_data: - user_data["nextcloud"] = {} - user_data["nextcloud"]["enable"] = True - - @staticmethod - def disable(): - """Disable Nextcloud service.""" - with WriteUserData() as user_data: - if "nextcloud" not in user_data: - user_data["nextcloud"] = {} - user_data["nextcloud"]["enable"] = False - @staticmethod def stop(): """Stop Nextcloud service.""" diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index d9d59a0..a28358d 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -51,29 +51,10 @@ class Ocserv(Service): def get_backup_description() -> str: return "Nothing to backup." 
- @staticmethod - def is_enabled() -> bool: - with ReadUserData() as user_data: - return user_data.get("ocserv", {}).get("enable", False) - @staticmethod def get_status() -> ServiceStatus: return get_service_status("ocserv.service") - @staticmethod - def enable(): - with WriteUserData() as user_data: - if "ocserv" not in user_data: - user_data["ocserv"] = {} - user_data["ocserv"]["enable"] = True - - @staticmethod - def disable(): - with WriteUserData() as user_data: - if "ocserv" not in user_data: - user_data["ocserv"] = {} - user_data["ocserv"]["enable"] = False - @staticmethod def stop(): subprocess.run(["systemctl", "stop", "ocserv.service"], check=False) diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index b2540d8..1aae50e 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -50,29 +50,10 @@ class Pleroma(Service): def get_backup_description() -> str: return "Your Pleroma accounts, posts and media." 
- @staticmethod - def is_enabled() -> bool: - with ReadUserData() as user_data: - return user_data.get("pleroma", {}).get("enable", False) - @staticmethod def get_status() -> ServiceStatus: return get_service_status("pleroma.service") - @staticmethod - def enable(): - with WriteUserData() as user_data: - if "pleroma" not in user_data: - user_data["pleroma"] = {} - user_data["pleroma"]["enable"] = True - - @staticmethod - def disable(): - with WriteUserData() as user_data: - if "pleroma" not in user_data: - user_data["pleroma"] = {} - user_data["pleroma"]["enable"] = False - @staticmethod def stop(): subprocess.run(["systemctl", "stop", "pleroma.service"]) From 8e21e6d378c1eb02083dd06ade6c1dd2fd63241b Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 3 Jan 2024 19:19:29 +0000 Subject: [PATCH 239/246] feature(services): introduce 'modules' field in userdata and group services settings there --- selfprivacy_api/migrations/__init__.py | 2 + selfprivacy_api/migrations/modules_in_json.py | 50 +++++++++++++++ selfprivacy_api/services/service.py | 25 ++++---- tests/data/turned_on.json | 22 ++++--- tests/test_graphql/test_system/turned_on.json | 40 ++++++------ tests/test_migrations.py | 60 ++++++++++++++++++ tests/test_migrations/strays.json | 23 +++++++ tests/test_services.py | 61 +++++++++++++------ 8 files changed, 222 insertions(+), 61 deletions(-) create mode 100644 selfprivacy_api/migrations/modules_in_json.py create mode 100644 tests/test_migrations.py create mode 100644 tests/test_migrations/strays.json diff --git a/selfprivacy_api/migrations/__init__.py b/selfprivacy_api/migrations/__init__.py index 4aa932c..f2d1f0d 100644 --- a/selfprivacy_api/migrations/__init__.py +++ b/selfprivacy_api/migrations/__init__.py @@ -19,6 +19,7 @@ from selfprivacy_api.migrations.migrate_to_selfprivacy_channel import ( ) from selfprivacy_api.migrations.mount_volume import MountVolume from selfprivacy_api.migrations.providers import CreateProviderFields +from 
selfprivacy_api.migrations.modules_in_json import CreateModulesField from selfprivacy_api.migrations.prepare_for_nixos_2211 import ( MigrateToSelfprivacyChannelFrom2205, ) @@ -37,6 +38,7 @@ migrations = [ MigrateToSelfprivacyChannelFrom2205(), MigrateToSelfprivacyChannelFrom2211(), LoadTokensToRedis(), + CreateModulesField(), ] diff --git a/selfprivacy_api/migrations/modules_in_json.py b/selfprivacy_api/migrations/modules_in_json.py new file mode 100644 index 0000000..64ba7d3 --- /dev/null +++ b/selfprivacy_api/migrations/modules_in_json.py @@ -0,0 +1,50 @@ +from selfprivacy_api.migrations.migration import Migration +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.services import get_all_services + + +def migrate_services_to_modules(): + with WriteUserData() as userdata: + if "modules" not in userdata.keys(): + userdata["modules"] = {} + + for service in get_all_services(): + name = service.get_id() + if name in userdata.keys(): + field_content = userdata[name] + userdata["modules"][name] = field_content + del userdata[name] + + +# If you ever want to get rid of modules field you will need to get rid of this migration +class CreateModulesField(Migration): + """introduce 'modules' (services) into userdata""" + + def get_migration_name(self): + return "modules_in_json" + + def get_migration_description(self): + return "Group service settings into a 'modules' field in userdata.json" + + def is_migration_needed(self) -> bool: + try: + with ReadUserData() as userdata: + for service in get_all_services(): + if service.get_id() in userdata.keys(): + return True + + if "modules" not in userdata.keys(): + return True + return False + except Exception as e: + print(e) + return False + + def migrate(self): + # Write info about providers to userdata.json + try: + migrate_services_to_modules() + print("Done") + except Exception as e: + print(e) + print("Error migrating service fields") diff --git a/selfprivacy_api/services/service.py 
b/selfprivacy_api/services/service.py index a53c028..b44f3a9 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -136,7 +136,7 @@ class Service(ABC): """ name = cls.get_id() with ReadUserData() as user_data: - return user_data.get(name, {}).get("enable", False) + return user_data.get("modules", {}).get(name, {}).get("enable", False) @staticmethod @abstractmethod @@ -144,24 +144,25 @@ class Service(ABC): """The status of the service, reported by systemd.""" pass - # But they do not really enable? + @classmethod + def _set_enable(cls, enable: bool): + name = cls.get_id() + with WriteUserData() as user_data: + if "modules" not in user_data: + user_data["modules"] = {} + if name not in user_data["modules"]: + user_data["modules"][name] = {} + user_data["modules"][name]["enable"] = enable + @classmethod def enable(cls): """Enable the service. Usually this means enabling systemd unit.""" - name = cls.get_id() - with WriteUserData() as user_data: - if name not in user_data: - user_data[name] = {} - user_data[name]["enable"] = True + cls._set_enable(True) @classmethod def disable(cls): """Disable the service. 
Usually this means disabling systemd unit.""" - name = cls.get_id() - with WriteUserData() as user_data: - if name not in user_data: - user_data[name] = {} - user_data[name]["enable"] = False + cls._set_enable(False) @staticmethod @abstractmethod diff --git a/tests/data/turned_on.json b/tests/data/turned_on.json index 2c98e77..1b6219d 100644 --- a/tests/data/turned_on.json +++ b/tests/data/turned_on.json @@ -1,15 +1,9 @@ { "api": {"token": "TEST_TOKEN", "enableSwagger": false}, - "bitwarden": {"enable": true}, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, "resticPassword": "PASS", "ssh": { "enable": true, @@ -17,16 +11,24 @@ "rootKeys": ["ssh-ed25519 KEY test@pc"] }, "username": "tester", - "gitea": {"enable": true}, - "ocserv": {"enable": true}, - "pleroma": {"enable": true}, - "jitsi": {"enable": true}, "autoUpgrade": {"enable": true, "allowReboot": true}, "useBinds": true, "timezone": "Europe/Moscow", "sshKeys": ["ssh-rsa KEY test@pc"], "dns": {"provider": "CLOUDFLARE", "apiKey": "TOKEN"}, "server": {"provider": "HETZNER"}, + "modules": { + "bitwarden": {"enable": true}, + "gitea": {"enable": true}, + "ocserv": {"enable": true}, + "pleroma": {"enable": true}, + "jitsi": {"enable": true}, + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + } + }, "backup": { "provider": "BACKBLAZE", "accountId": "ID", diff --git a/tests/test_graphql/test_system/turned_on.json b/tests/test_graphql/test_system/turned_on.json index c6b758b..240c6c9 100644 --- a/tests/test_graphql/test_system/turned_on.json +++ b/tests/test_graphql/test_system/turned_on.json @@ -3,18 +3,10 @@ "token": "TEST_TOKEN", "enableSwagger": false }, - "bitwarden": { - "enable": true - }, "databasePassword": "PASSWORD", "domain": "test.tld", "hashedMasterPassword": "HASHED_PASSWORD", 
"hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, "resticPassword": "PASS", "ssh": { "enable": true, @@ -24,17 +16,27 @@ ] }, "username": "tester", - "gitea": { - "enable": true - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "jitsi": { - "enable": true + "modules": { + "gitea": { + "enable": true + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "jitsi": { + "enable": true + }, + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "bitwarden": { + "enable": true + } }, "autoUpgrade": { "enable": true, diff --git a/tests/test_migrations.py b/tests/test_migrations.py new file mode 100644 index 0000000..55f311a --- /dev/null +++ b/tests/test_migrations.py @@ -0,0 +1,60 @@ +import pytest + +from selfprivacy_api.migrations.modules_in_json import CreateModulesField +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.services import get_all_services + + +@pytest.fixture() +def stray_services(mocker, datadir): + mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "strays.json") + return datadir + + +@pytest.fixture() +def empty_json(generic_userdata): + with WriteUserData() as data: + data.clear() + + with ReadUserData() as data: + assert len(data.keys()) == 0 + + return + + +def test_modules_empty_json(empty_json): + with ReadUserData() as data: + assert "modules" not in data.keys() + + assert CreateModulesField().is_migration_needed() + + CreateModulesField().migrate() + assert not CreateModulesField().is_migration_needed() + + with ReadUserData() as data: + assert "modules" in data.keys() + + +@pytest.mark.parametrize("modules_field", [True, False]) +def test_modules_stray_services(modules_field, stray_services): + if not modules_field: + with WriteUserData() as data: + del data["modules"] + assert CreateModulesField().is_migration_needed() 
+ + CreateModulesField().migrate() + + for service in get_all_services(): + # assumes we do not tolerate previous format + assert service.is_enabled() + if service.get_id() == "email": + continue + with ReadUserData() as data: + assert service.get_id() in data["modules"].keys() + assert service.get_id() not in data.keys() + + assert not CreateModulesField().is_migration_needed() + + +def test_modules_no_migration_on_generic_data(generic_userdata): + assert not CreateModulesField().is_migration_needed() diff --git a/tests/test_migrations/strays.json b/tests/test_migrations/strays.json new file mode 100644 index 0000000..ee81350 --- /dev/null +++ b/tests/test_migrations/strays.json @@ -0,0 +1,23 @@ +{ + "bitwarden": { + "enable": true + }, + "nextcloud": { + "adminPassword": "ADMIN", + "databasePassword": "ADMIN", + "enable": true + }, + "gitea": { + "enable": true + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true + }, + "jitsi": { + "enable": true + }, + "modules": {} +} diff --git a/tests/test_services.py b/tests/test_services.py index f3d6adc..65b4dc9 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -7,6 +7,8 @@ from pytest import raises from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.utils.waitloop import wait_until_true +import selfprivacy_api.services as services_module + from selfprivacy_api.services.bitwarden import Bitwarden from selfprivacy_api.services.pleroma import Pleroma from selfprivacy_api.services.mailserver import MailServer @@ -15,6 +17,7 @@ from selfprivacy_api.services.generic_service_mover import FolderMoveNames from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.services.service import Service, ServiceStatus, StoppedService +from selfprivacy_api.services import get_enabled_services from tests.test_dkim import domain_file, dkim_file, no_dkim_file @@ -95,35 +98,49 @@ def test_foldermoves_from_ownedpaths(): def 
test_enabling_disabling_reads_json(dummy_service: DummyService): with WriteUserData() as data: - data[dummy_service.get_id()]["enable"] = False + data["modules"][dummy_service.get_id()]["enable"] = False assert dummy_service.is_enabled() is False with WriteUserData() as data: - data[dummy_service.get_id()]["enable"] = True + data["modules"][dummy_service.get_id()]["enable"] = True assert dummy_service.is_enabled() is True -@pytest.fixture(params=["normally_enabled", "deleted_attribute", "service_not_in_json"]) +# A helper to test undefined states. Used in fixtures below +def undefine_service_enabled_status(param, dummy_service): + if param == "deleted_attribute": + with WriteUserData() as data: + del data["modules"][dummy_service.get_id()]["enable"] + if param == "service_not_in_json": + with WriteUserData() as data: + del data["modules"][dummy_service.get_id()] + if param == "modules_not_in_json": + with WriteUserData() as data: + del data["modules"] + + +# May be defined or not +@pytest.fixture( + params=[ + "normally_enabled", + "deleted_attribute", + "service_not_in_json", + "modules_not_in_json", + ] +) def possibly_dubiously_enabled_service( dummy_service: DummyService, request ) -> DummyService: - if request.param == "deleted_attribute": - with WriteUserData() as data: - del data[dummy_service.get_id()]["enable"] - if request.param == "service_not_in_json": - with WriteUserData() as data: - del data[dummy_service.get_id()] + if request.param != "normally_enabled": + undefine_service_enabled_status(request.param, dummy_service) return dummy_service -# Yeah, idk yet how to dry it. 
-@pytest.fixture(params=["deleted_attribute", "service_not_in_json"]) +# Strictly UNdefined +@pytest.fixture( + params=["deleted_attribute", "service_not_in_json", "modules_not_in_json"] +) def undefined_enabledness_service(dummy_service: DummyService, request) -> DummyService: - if request.param == "deleted_attribute": - with WriteUserData() as data: - del data[dummy_service.get_id()]["enable"] - if request.param == "service_not_in_json": - with WriteUserData() as data: - del data[dummy_service.get_id()] + undefine_service_enabled_status(request.param, dummy_service) return dummy_service @@ -141,13 +158,13 @@ def test_enabling_disabling_writes_json( dummy_service.disable() with ReadUserData() as data: - assert data[dummy_service.get_id()]["enable"] is False + assert data["modules"][dummy_service.get_id()]["enable"] is False dummy_service.enable() with ReadUserData() as data: - assert data[dummy_service.get_id()]["enable"] is True + assert data["modules"][dummy_service.get_id()]["enable"] is True dummy_service.disable() with ReadUserData() as data: - assert data[dummy_service.get_id()]["enable"] is False + assert data["modules"][dummy_service.get_id()]["enable"] is False # more detailed testing of this is in test_graphql/test_system.py @@ -158,3 +175,7 @@ def test_mailserver_with_dkim_returns_some_dns(dkim_file): def test_mailserver_with_no_dkim_returns_no_dns(no_dkim_file): assert MailServer().get_dns_records() == [] + + +def test_services_enabled_by_default(generic_userdata): + assert set(get_enabled_services()) == set(services_module.services) From 2b21df9ad35855803ec227c56e88a8c962e97b39 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Wed, 3 Jan 2024 19:30:27 +0000 Subject: [PATCH 240/246] chore(version): bump version to 3.0, no Rest API --- selfprivacy_api/dependencies.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index 05c9bdc..1dfc0a9 100644 --- 
a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.4.3" + return "3.0.0" diff --git a/setup.py b/setup.py index 93637ff..36aa68e 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.4.3", + version="3.0.0", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", From 1e9744227ba7233a3187b5310c22c50660e45d0c Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 9 Jan 2024 20:20:58 +0300 Subject: [PATCH 241/246] refactor: Move from nix-shell to nix flake --- .gitignore | 2 + README.md | 67 ++++++++++++++ default.nix | 33 +++++++ flake.lock | 26 ++++++ flake.nix | 50 ++++++++++ nix-dependencies-diagram.puml | 22 +++++ nixos/module.nix | 166 ++++++++++++++++++++++++++++++++++ shell.nix | 48 ---------- 8 files changed, 366 insertions(+), 48 deletions(-) create mode 100644 README.md create mode 100644 default.nix create mode 100644 flake.lock create mode 100644 flake.nix create mode 100644 nix-dependencies-diagram.puml create mode 100644 nixos/module.nix delete mode 100644 shell.nix diff --git a/.gitignore b/.gitignore index 7f93e02..bd62fff 100755 --- a/.gitignore +++ b/.gitignore @@ -148,3 +148,5 @@ cython_debug/ *.db *.rdb + +/result diff --git a/README.md b/README.md new file mode 100644 index 0000000..01ffd88 --- /dev/null +++ b/README.md @@ -0,0 +1,67 @@ +# SelfPrivacy GraphQL API which allows app to control your server + +## build + +```console +$ nix build +``` + +As a result, you should get the `./result` symlink to a folder (in `/nix/store`) with build contents. + +## develop & test + +```console +$ nix develop +$ [SP devshell] pytest . 
+=================================== test session starts ===================================== +platform linux -- Python 3.10.11, pytest-7.1.3, pluggy-1.0.0 +rootdir: /data/selfprivacy/selfprivacy-rest-api +plugins: anyio-3.5.0, datadir-1.4.1, mock-3.8.2 +collected 692 items + +tests/test_block_device_utils.py ................. [ 2%] +tests/test_common.py ..... [ 3%] +tests/test_jobs.py ........ [ 4%] +tests/test_model_storage.py .. [ 4%] +tests/test_models.py .. [ 4%] +tests/test_network_utils.py ...... [ 5%] +tests/test_services.py ...... [ 6%] +tests/test_graphql/test_api.py . [ 6%] +tests/test_graphql/test_api_backup.py ............... [ 8%] +tests/test_graphql/test_api_devices.py ................. [ 11%] +tests/test_graphql/test_api_recovery.py ......... [ 12%] +tests/test_graphql/test_api_version.py .. [ 13%] +tests/test_graphql/test_backup.py ............................... [ 21%] +tests/test_graphql/test_localsecret.py ... [ 22%] +tests/test_graphql/test_ssh.py ............ [ 23%] +tests/test_graphql/test_system.py ............................. [ 28%] +tests/test_graphql/test_system_nixos_tasks.py ........ [ 29%] +tests/test_graphql/test_users.py .................................. [ 42%] +tests/test_graphql/test_repository/test_json_tokens_repository.py [ 44%] +tests/test_graphql/test_repository/test_tokens_repository.py .... [ 53%] +tests/test_rest_endpoints/test_auth.py .......................... [ 58%] +tests/test_rest_endpoints/test_system.py ........................ [ 63%] +tests/test_rest_endpoints/test_users.py ................................ [ 76%] +tests/test_rest_endpoints/services/test_bitwarden.py ............ [ 78%] +tests/test_rest_endpoints/services/test_gitea.py .............. [ 80%] +tests/test_rest_endpoints/services/test_mailserver.py ..... [ 81%] +tests/test_rest_endpoints/services/test_nextcloud.py ............ [ 83%] +tests/test_rest_endpoints/services/test_ocserv.py .............. 
[ 85%] +tests/test_rest_endpoints/services/test_pleroma.py .............. [ 87%] +tests/test_rest_endpoints/services/test_services.py .... [ 88%] +tests/test_rest_endpoints/services/test_ssh.py ..................... [100%] + +============================== 692 passed in 352.76s (0:05:52) =============================== +``` + +## dependencies and dependant modules + +Current flake inherits nixpkgs from NixOS configuration flake. So there is no need to refer to extra nixpkgs dependency if you want to be aligned with exact NixOS configuration. + +![diagram](http://www.plantuml.com/plantuml/proxy?src=https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api/raw/branch/master/nix-dependencies-diagram.puml) + +Nix code for NixOS service module for API is located in NixOS configuration repository. + +## current issues + +- It's not clear how to store in this repository information about several compatible NixOS configuration commits, where API application tests pass. Currently, here is only a single `flake.lock`. diff --git a/default.nix b/default.nix new file mode 100644 index 0000000..1c779d9 --- /dev/null +++ b/default.nix @@ -0,0 +1,33 @@ +{ pythonPackages, rev ? 
"local" }: + +pythonPackages.buildPythonPackage rec { + pname = "selfprivacy-graphql-api"; + version = rev; + src = builtins.filterSource (p: t: p != ".git" && t != "symlink") ./.; + nativeCheckInputs = [ pythonPackages.pytestCheckHook ]; + propagatedBuildInputs = with pythonPackages; [ + fastapi + gevent + huey + mnemonic + portalocker + psutil + pydantic + pytest + pytest-datadir + pytest-mock + pytz + redis + setuptools + strawberry-graphql + typing-extensions + uvicorn + ]; + pythonImportsCheck = [ "selfprivacy_api" ]; + doCheck = false; + meta = { + description = '' + SelfPrivacy Server Management API + ''; + }; +} diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..3dd8a15 --- /dev/null +++ b/flake.lock @@ -0,0 +1,26 @@ +{ + "nodes": { + "nixpkgs": { + "locked": { + "lastModified": 1702780907, + "narHash": "sha256-blbrBBXjjZt6OKTcYX1jpe9SRof2P9ZYWPzq22tzXAA=", + "owner": "nixos", + "repo": "nixpkgs", + "rev": "1e2e384c5b7c50dbf8e9c441a9e58d85f408b01f", + "type": "github" + }, + "original": { + "owner": "nixos", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "nixpkgs": "nixpkgs" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..c133604 --- /dev/null +++ b/flake.nix @@ -0,0 +1,50 @@ +{ + description = "SelfPrivacy API flake"; + + inputs.nixpkgs.url = "github:nixos/nixpkgs"; + + outputs = { self, nixpkgs, ... }: + let + system = "x86_64-linux"; + pkgs = nixpkgs.legacyPackages.${system}; + selfprivacy-graphql-api = pkgs.callPackage ./default.nix { + pythonPackages = pkgs.python310Packages; + rev = self.shortRev or self.dirtyShortRev or "dirty"; + }; + in + { + packages.${system}.default = selfprivacy-graphql-api; + nixosModules.default = + import ./nixos/module.nix self.packages.${system}.default; + devShells.${system}.default = pkgs.mkShell { + packages = + let + # TODO is there a better way to get environment for VS Code? 
+ python3 = + nixpkgs.lib.findFirst (p: p.pname == "python3") (abort "wtf") + self.packages.${system}.default.propagatedBuildInputs; + python-env = + python3.withPackages + (_: self.packages.${system}.default.propagatedBuildInputs); + in + with pkgs; [ + python-env + black + rclone + redis + restic + ]; + shellHook = '' + # envs set with export and as attributes are treated differently. + # for example. printenv will not fetch the value of an attribute. + export USE_REDIS_PORT=6379 + export TEST_MODE=true + pkill redis-server + sleep 2 + setsid redis-server --bind 127.0.0.1 --port $USE_REDIS_PORT >/dev/null 2>/dev/null & + # maybe set more env-vars + ''; + }; + }; + nixConfig.bash-prompt = ''\n\[\e[1;32m\][\[\e[0m\]\[\e[1;34m\]SP devshell\[\e[0m\]\[\e[1;32m\]:\w]\$\[\[\e[0m\] ''; +} diff --git a/nix-dependencies-diagram.puml b/nix-dependencies-diagram.puml new file mode 100644 index 0000000..de98bf7 --- /dev/null +++ b/nix-dependencies-diagram.puml @@ -0,0 +1,22 @@ +@startuml + +left to right direction + +title repositories and flake inputs relations diagram + +cloud nixpkgs as nixpkgs_transit +control "nixos-rebuild" as nixos_rebuild +component "SelfPrivacy\nAPI app" as selfprivacy_app +component "SelfPrivacy\nNixOS configuration" as nixos_configuration + +note top of nixos_configuration : SelfPrivacy\nAPI service module + +nixos_configuration ).. nixpkgs_transit +nixpkgs_transit ..> selfprivacy_app +selfprivacy_app --> nixos_configuration +[nixpkgs] --> nixos_configuration +nixos_configuration -> nixos_rebuild + +footer %date("yyyy-MM-dd'T'HH:mmZ") + +@enduml diff --git a/nixos/module.nix b/nixos/module.nix new file mode 100644 index 0000000..7790e18 --- /dev/null +++ b/nixos/module.nix @@ -0,0 +1,166 @@ +selfprivacy-graphql-api: { config, lib, pkgs, ... 
}: + +let + cfg = config.services.selfprivacy-api; + config-id = "default"; + nixos-rebuild = "${config.system.build.nixos-rebuild}/bin/nixos-rebuild"; + nix = "${config.nix.package.out}/bin/nix"; +in +{ + options.services.selfprivacy-api = { + enable = lib.mkOption { + default = true; + type = lib.types.bool; + description = '' + Enable SelfPrivacy API service + ''; + }; + }; + config = lib.mkIf cfg.enable { + users.users."selfprivacy-api" = { + isNormalUser = false; + isSystemUser = true; + extraGroups = [ "opendkim" ]; + group = "selfprivacy-api"; + }; + users.groups."selfprivacy-api".members = [ "selfprivacy-api" ]; + + systemd.services.selfprivacy-api = { + description = "API Server used to control system from the mobile application"; + environment = config.nix.envVars // { + HOME = "/root"; + PYTHONUNBUFFERED = "1"; + } // config.networking.proxy.envVars; + path = [ + "/var/" + "/var/dkim/" + pkgs.coreutils + pkgs.gnutar + pkgs.xz.bin + pkgs.gzip + pkgs.gitMinimal + config.nix.package.out + pkgs.restic + pkgs.mkpasswd + pkgs.util-linux + pkgs.e2fsprogs + pkgs.iproute2 + ]; + after = [ "network-online.target" ]; + wantedBy = [ "network-online.target" ]; + serviceConfig = { + User = "root"; + ExecStart = "${selfprivacy-graphql-api}/bin/app.py"; + Restart = "always"; + RestartSec = "5"; + }; + }; + systemd.services.selfprivacy-api-worker = { + description = "Task worker for SelfPrivacy API"; + environment = config.nix.envVars // { + HOME = "/root"; + PYTHONUNBUFFERED = "1"; + PYTHONPATH = + pkgs.python310Packages.makePythonPath [ selfprivacy-graphql-api ]; + } // config.networking.proxy.envVars; + path = [ + "/var/" + "/var/dkim/" + pkgs.coreutils + pkgs.gnutar + pkgs.xz.bin + pkgs.gzip + pkgs.gitMinimal + config.nix.package.out + pkgs.restic + pkgs.mkpasswd + pkgs.util-linux + pkgs.e2fsprogs + pkgs.iproute2 + ]; + after = [ "network-online.target" ]; + wantedBy = [ "network-online.target" ]; + serviceConfig = { + User = "root"; + ExecStart = 
"${pkgs.python310Packages.huey}/bin/huey_consumer.py selfprivacy_api.task_registry.huey"; + Restart = "always"; + RestartSec = "5"; + }; + }; + # One shot systemd service to rebuild NixOS using nixos-rebuild + systemd.services.sp-nixos-rebuild = { + description = "nixos-rebuild switch"; + environment = config.nix.envVars // { + HOME = "/root"; + } // config.networking.proxy.envVars; + # TODO figure out how to get dependencies list reliably + path = [ pkgs.coreutils pkgs.gnutar pkgs.xz.bin pkgs.gzip pkgs.gitMinimal config.nix.package.out ]; + # TODO set proper timeout for reboot instead of service restart + serviceConfig = { + User = "root"; + WorkingDirectory = "/etc/nixos"; + # sync top-level flake with sp-modules sub-flake + # (https://github.com/NixOS/nix/issues/9339) + ExecStartPre = '' + ${nix} flake lock --override-input sp-modules path:./sp-modules + ''; + ExecStart = '' + ${nixos-rebuild} switch --flake .#${config-id} + ''; + KillMode = "none"; + SendSIGKILL = "no"; + }; + restartIfChanged = false; + unitConfig.X-StopOnRemoval = false; + }; + # One shot systemd service to upgrade NixOS using nixos-rebuild + systemd.services.sp-nixos-upgrade = { + # protection against simultaneous runs + after = [ "sp-nixos-rebuild.service" ]; + description = "Upgrade NixOS and SP modules to latest versions"; + environment = config.nix.envVars // { + HOME = "/root"; + } // config.networking.proxy.envVars; + # TODO figure out how to get dependencies list reliably + path = [ pkgs.coreutils pkgs.gnutar pkgs.xz.bin pkgs.gzip pkgs.gitMinimal config.nix.package.out ]; + serviceConfig = { + User = "root"; + WorkingDirectory = "/etc/nixos"; + # TODO get URL from systemd template parameter? 
+ ExecStartPre = '' + ${nix} flake update \ + --override-input selfprivacy-nixos-config git+https://git.selfprivacy.org/SelfPrivacy/selfprivacy-nixos-config.git?ref=flakes + ''; + ExecStart = '' + ${nixos-rebuild} switch --flake .#${config-id} + ''; + KillMode = "none"; + SendSIGKILL = "no"; + }; + restartIfChanged = false; + unitConfig.X-StopOnRemoval = false; + }; + # One shot systemd service to rollback NixOS using nixos-rebuild + systemd.services.sp-nixos-rollback = { + # protection against simultaneous runs + after = [ "sp-nixos-rebuild.service" "sp-nixos-upgrade.service" ]; + description = "Rollback NixOS using nixos-rebuild"; + environment = config.nix.envVars // { + HOME = "/root"; + } // config.networking.proxy.envVars; + # TODO figure out how to get dependencies list reliably + path = [ pkgs.coreutils pkgs.gnutar pkgs.xz.bin pkgs.gzip pkgs.gitMinimal config.nix.package.out ]; + serviceConfig = { + User = "root"; + WorkingDirectory = "/etc/nixos"; + ExecStart = '' + ${nixos-rebuild} switch --rollback --flake .#${config-id} + ''; + KillMode = "none"; + SendSIGKILL = "no"; + }; + restartIfChanged = false; + unitConfig.X-StopOnRemoval = false; + }; + }; +} diff --git a/shell.nix b/shell.nix deleted file mode 100644 index bce16bd..0000000 --- a/shell.nix +++ /dev/null @@ -1,48 +0,0 @@ -{ pkgs ? import { } }: -let - sp-python = pkgs.python310.withPackages (p: with p; [ - setuptools - portalocker - pytz - pytest - pytest-mock - pytest-datadir - huey - gevent - mnemonic - coverage - pylint - rope - mypy - pylsp-mypy - pydantic - typing-extensions - psutil - black - fastapi - uvicorn - redis - strawberry-graphql - flake8-bugbear - flake8 - ]); -in -pkgs.mkShell { - buildInputs = [ - sp-python - pkgs.black - pkgs.redis - pkgs.restic - pkgs.rclone - ]; - shellHook = '' - PYTHONPATH=${sp-python}/${sp-python.sitePackages} - # envs set with export and as attributes are treated differently. - # for example. printenv will not fetch the value of an attribute. 
- export USE_REDIS_PORT=6379 - pkill redis-server - sleep 2 - setsid redis-server --bind 127.0.0.1 --port $USE_REDIS_PORT >/dev/null 2>/dev/null & - # maybe set more env-vars - ''; -} From b6f436d8b333e37d7d0e13b338d876a53ae6cccd Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 9 Jan 2024 21:58:09 +0300 Subject: [PATCH 242/246] refactor: Adapt API to the NixOS configuration changes --- selfprivacy_api/actions/ssh.py | 2 +- selfprivacy_api/actions/system.py | 2 +- .../backup/backuppers/restic_backupper.py | 1 - selfprivacy_api/graphql/common_types/user.py | 2 - selfprivacy_api/graphql/queries/jobs.py | 1 - selfprivacy_api/migrations/__init__.py | 35 +-- .../check_for_failed_binds_migration.py | 48 ---- .../migrations/create_tokens_json.py | 58 ----- .../migrations/fix_nixos_config_branch.py | 57 ---- .../migrate_to_selfprivacy_channel.py | 49 ---- selfprivacy_api/migrations/modules_in_json.py | 50 ---- selfprivacy_api/migrations/mount_volume.py | 51 ---- .../migrations/prepare_for_nixos_2211.py | 58 ----- .../migrations/prepare_for_nixos_2305.py | 58 ----- selfprivacy_api/migrations/providers.py | 43 --- selfprivacy_api/migrations/redis_tokens.py | 48 ---- .../migrations/write_token_to_redis.py | 63 +++++ .../repositories/tokens/__init__.py | 8 - .../tokens/json_tokens_repository.py | 153 ----------- selfprivacy_api/services/__init__.py | 4 +- .../services/generic_service_mover.py | 8 +- .../services/{jitsi => jitsimeet}/__init__.py | 14 +- .../services/{jitsi => jitsimeet}/icon.py | 0 .../services/mailserver/__init__.py | 4 +- selfprivacy_api/services/service.py | 10 +- selfprivacy_api/utils/__init__.py | 41 ++- selfprivacy_api/utils/huey.py | 5 +- tests/common.py | 24 +- tests/conftest.py | 74 ++---- tests/data/jobs.json | 1 - tests/data/tokens.json | 14 - tests/data/turned_on.json | 98 ++++--- tests/test_block_device_utils.py | 2 +- tests/test_block_device_utils/no_devices.json | 92 +++---- tests/test_block_device_utils/only_root.json | 101 ++++---- 
tests/test_block_device_utils/undefined.json | 87 ++++--- tests/test_dkim.py | 14 +- tests/test_graphql/test_api.py | 2 +- tests/test_graphql/test_api_devices.py | 32 +-- tests/test_graphql/test_api_recovery.py | 43 ++- tests/test_graphql/test_services.py | 2 +- tests/test_graphql/test_ssh.py | 2 - tests/test_graphql/test_ssh/some_users.json | 95 +++---- tests/test_graphql/test_system.py | 29 +-- tests/test_graphql/test_system/domain | 1 - tests/test_graphql/test_system/no_values.json | 89 ++++--- .../test_graphql/test_system/turned_off.json | 94 ++++--- tests/test_graphql/test_system/turned_on.json | 103 ++++---- tests/test_graphql/test_system/undefined.json | 84 +++--- tests/test_graphql/test_system_nixos_tasks.py | 6 - tests/test_graphql/test_users.py | 2 - tests/test_graphql/test_users/no_users.json | 96 +++---- tests/test_graphql/test_users/one_user.json | 95 +++---- tests/test_graphql/test_users/some_users.json | 95 +++---- tests/test_graphql/test_users/undefined.json | 93 ++++--- tests/test_migrations.py | 60 ----- tests/test_migrations/strays.json | 23 -- .../test_json_tokens_repository.py | 245 ------------------ .../empty_keys.json | 9 - .../null_keys.json | 26 -- .../test_json_tokens_repository/tokens.json | 35 --- .../test_repository/test_tokens_repository.py | 36 +-- .../test_tokens_repository/empty_keys.json | 9 - .../test_tokens_repository/null_keys.json | 26 -- .../test_tokens_repository/tokens.json | 35 --- tests/test_services.py | 2 +- tests/test_ssh.py | 4 +- tests/test_users.py | 29 ++- 68 files changed, 875 insertions(+), 2007 deletions(-) delete mode 100644 selfprivacy_api/migrations/check_for_failed_binds_migration.py delete mode 100644 selfprivacy_api/migrations/create_tokens_json.py delete mode 100644 selfprivacy_api/migrations/fix_nixos_config_branch.py delete mode 100644 selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py delete mode 100644 selfprivacy_api/migrations/modules_in_json.py delete mode 100644 
selfprivacy_api/migrations/mount_volume.py delete mode 100644 selfprivacy_api/migrations/prepare_for_nixos_2211.py delete mode 100644 selfprivacy_api/migrations/prepare_for_nixos_2305.py delete mode 100644 selfprivacy_api/migrations/providers.py delete mode 100644 selfprivacy_api/migrations/redis_tokens.py create mode 100644 selfprivacy_api/migrations/write_token_to_redis.py delete mode 100644 selfprivacy_api/repositories/tokens/json_tokens_repository.py rename selfprivacy_api/services/{jitsi => jitsimeet}/__init__.py (90%) rename selfprivacy_api/services/{jitsi => jitsimeet}/icon.py (100%) delete mode 100644 tests/data/jobs.json delete mode 100644 tests/data/tokens.json delete mode 100644 tests/test_graphql/test_system/domain delete mode 100644 tests/test_migrations.py delete mode 100644 tests/test_migrations/strays.json delete mode 100644 tests/test_repository/test_json_tokens_repository.py delete mode 100644 tests/test_repository/test_json_tokens_repository/empty_keys.json delete mode 100644 tests/test_repository/test_json_tokens_repository/null_keys.json delete mode 100644 tests/test_repository/test_json_tokens_repository/tokens.json delete mode 100644 tests/test_repository/test_tokens_repository/empty_keys.json delete mode 100644 tests/test_repository/test_tokens_repository/null_keys.json delete mode 100644 tests/test_repository/test_tokens_repository/tokens.json diff --git a/selfprivacy_api/actions/ssh.py b/selfprivacy_api/actions/ssh.py index 8a92735..0c529ef 100644 --- a/selfprivacy_api/actions/ssh.py +++ b/selfprivacy_api/actions/ssh.py @@ -31,7 +31,7 @@ def get_ssh_settings() -> UserdataSshSettings: if "enable" not in data["ssh"]: data["ssh"]["enable"] = True if "passwordAuthentication" not in data["ssh"]: - data["ssh"]["passwordAuthentication"] = True + data["ssh"]["passwordAuthentication"] = False if "rootKeys" not in data["ssh"]: data["ssh"]["rootKeys"] = [] return UserdataSshSettings(**data["ssh"]) diff --git a/selfprivacy_api/actions/system.py 
b/selfprivacy_api/actions/system.py index f5e0dc0..13c3708 100644 --- a/selfprivacy_api/actions/system.py +++ b/selfprivacy_api/actions/system.py @@ -13,7 +13,7 @@ def get_timezone() -> str: with ReadUserData() as user_data: if "timezone" in user_data: return user_data["timezone"] - return "Europe/Uzhgorod" + return "Etc/UTC" class InvalidTimezone(Exception): diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index 0d74d9c..a8d4e05 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -372,7 +372,6 @@ class ResticBackupper(AbstractBackupper): stderr=subprocess.STDOUT, shell=False, ) as handle: - # for some reason restore does not support # nice reporting of progress via json output = handle.communicate()[0].decode("utf-8") diff --git a/selfprivacy_api/graphql/common_types/user.py b/selfprivacy_api/graphql/common_types/user.py index 26ad6f2..a515821 100644 --- a/selfprivacy_api/graphql/common_types/user.py +++ b/selfprivacy_api/graphql/common_types/user.py @@ -17,7 +17,6 @@ class UserType(Enum): @strawberry.type class User: - user_type: UserType username: str # userHomeFolderspace: UserHomeFolderUsage @@ -32,7 +31,6 @@ class UserMutationReturn(MutationReturnInterface): def get_user_by_username(username: str) -> typing.Optional[User]: - user = users_actions.get_user_by_username(username) if user is None: return None diff --git a/selfprivacy_api/graphql/queries/jobs.py b/selfprivacy_api/graphql/queries/jobs.py index 49bcbd7..e7b99e6 100644 --- a/selfprivacy_api/graphql/queries/jobs.py +++ b/selfprivacy_api/graphql/queries/jobs.py @@ -15,7 +15,6 @@ from selfprivacy_api.jobs import Jobs class Job: @strawberry.field def get_jobs(self) -> typing.List[ApiJob]: - Jobs.get_jobs() return [job_to_api_job(job) for job in Jobs.get_jobs()] diff --git a/selfprivacy_api/migrations/__init__.py b/selfprivacy_api/migrations/__init__.py 
index f2d1f0d..5e05b2d 100644 --- a/selfprivacy_api/migrations/__init__.py +++ b/selfprivacy_api/migrations/__init__.py @@ -8,37 +8,12 @@ at api.skippedMigrations in userdata.json and populating it with IDs of the migrations to skip. Adding DISABLE_ALL to that array disables the migrations module entirely. """ -from selfprivacy_api.migrations.check_for_failed_binds_migration import ( - CheckForFailedBindsMigration, -) -from selfprivacy_api.utils import ReadUserData -from selfprivacy_api.migrations.fix_nixos_config_branch import FixNixosConfigBranch -from selfprivacy_api.migrations.create_tokens_json import CreateTokensJson -from selfprivacy_api.migrations.migrate_to_selfprivacy_channel import ( - MigrateToSelfprivacyChannel, -) -from selfprivacy_api.migrations.mount_volume import MountVolume -from selfprivacy_api.migrations.providers import CreateProviderFields -from selfprivacy_api.migrations.modules_in_json import CreateModulesField -from selfprivacy_api.migrations.prepare_for_nixos_2211 import ( - MigrateToSelfprivacyChannelFrom2205, -) -from selfprivacy_api.migrations.prepare_for_nixos_2305 import ( - MigrateToSelfprivacyChannelFrom2211, -) -from selfprivacy_api.migrations.redis_tokens import LoadTokensToRedis + +from selfprivacy_api.utils import ReadUserData, UserDataFiles +from selfprivacy_api.migrations.write_token_to_redis import WriteTokenToRedis migrations = [ - FixNixosConfigBranch(), - CreateTokensJson(), - MigrateToSelfprivacyChannel(), - MountVolume(), - CheckForFailedBindsMigration(), - CreateProviderFields(), - MigrateToSelfprivacyChannelFrom2205(), - MigrateToSelfprivacyChannelFrom2211(), - LoadTokensToRedis(), - CreateModulesField(), + WriteTokenToRedis(), ] @@ -47,7 +22,7 @@ def run_migrations(): Go over all migrations. If they are not skipped in userdata file, run them if the migration needed. 
""" - with ReadUserData() as data: + with ReadUserData(UserDataFiles.SECRETS) as data: if "api" not in data: skipped_migrations = [] elif "skippedMigrations" not in data["api"]: diff --git a/selfprivacy_api/migrations/check_for_failed_binds_migration.py b/selfprivacy_api/migrations/check_for_failed_binds_migration.py deleted file mode 100644 index 41d56b2..0000000 --- a/selfprivacy_api/migrations/check_for_failed_binds_migration.py +++ /dev/null @@ -1,48 +0,0 @@ -from selfprivacy_api.jobs import JobStatus, Jobs - -from selfprivacy_api.migrations.migration import Migration -from selfprivacy_api.utils import WriteUserData - - -class CheckForFailedBindsMigration(Migration): - """Mount volume.""" - - def get_migration_name(self): - return "check_for_failed_binds_migration" - - def get_migration_description(self): - return "If binds migration failed, try again." - - def is_migration_needed(self): - try: - jobs = Jobs.get_jobs() - # If there is a job with type_id "migrations.migrate_to_binds" and status is not "FINISHED", - # then migration is needed and job is deleted - for job in jobs: - if ( - job.type_id == "migrations.migrate_to_binds" - and job.status != JobStatus.FINISHED - ): - return True - return False - except Exception as e: - print(e) - return False - - def migrate(self): - # Get info about existing volumes - # Write info about volumes to userdata.json - try: - jobs = Jobs.get_jobs() - for job in jobs: - if ( - job.type_id == "migrations.migrate_to_binds" - and job.status != JobStatus.FINISHED - ): - Jobs.remove(job) - with WriteUserData() as userdata: - userdata["useBinds"] = False - print("Done") - except Exception as e: - print(e) - print("Error mounting volume") diff --git a/selfprivacy_api/migrations/create_tokens_json.py b/selfprivacy_api/migrations/create_tokens_json.py deleted file mode 100644 index 38702f8..0000000 --- a/selfprivacy_api/migrations/create_tokens_json.py +++ /dev/null @@ -1,58 +0,0 @@ -from datetime import datetime -import os -import 
json -from pathlib import Path - -from selfprivacy_api.migrations.migration import Migration -from selfprivacy_api.utils import TOKENS_FILE, ReadUserData - - -class CreateTokensJson(Migration): - def get_migration_name(self): - return "create_tokens_json" - - def get_migration_description(self): - return """Selfprivacy API used a single token in userdata.json for authentication. - This migration creates a new tokens.json file with the old token in it. - This migration runs if the tokens.json file does not exist. - Old token is located at ["api"]["token"] in userdata.json. - tokens.json path is declared in TOKENS_FILE imported from utils.py - tokens.json must have the following format: - { - "tokens": [ - { - "token": "token_string", - "name": "Master Token", - "date": "current date from str(datetime.now())", - } - ] - } - tokens.json must have 0600 permissions. - """ - - def is_migration_needed(self): - return not os.path.exists(TOKENS_FILE) - - def migrate(self): - try: - print(f"Creating tokens.json file at {TOKENS_FILE}") - with ReadUserData() as userdata: - token = userdata["api"]["token"] - # Touch tokens.json with 0600 permissions - Path(TOKENS_FILE).touch(mode=0o600) - # Write token to tokens.json - structure = { - "tokens": [ - { - "token": token, - "name": "primary_token", - "date": str(datetime.now()), - } - ] - } - with open(TOKENS_FILE, "w", encoding="utf-8") as tokens: - json.dump(structure, tokens, indent=4) - print("Done") - except Exception as e: - print(e) - print("Error creating tokens.json") diff --git a/selfprivacy_api/migrations/fix_nixos_config_branch.py b/selfprivacy_api/migrations/fix_nixos_config_branch.py deleted file mode 100644 index fbb994c..0000000 --- a/selfprivacy_api/migrations/fix_nixos_config_branch.py +++ /dev/null @@ -1,57 +0,0 @@ -import os -import subprocess - -from selfprivacy_api.migrations.migration import Migration - - -class FixNixosConfigBranch(Migration): - def get_migration_name(self): - return 
"fix_nixos_config_branch" - - def get_migration_description(self): - return """Mobile SelfPrivacy app introduced a bug in version 0.4.0. - New servers were initialized with a rolling-testing nixos config branch. - This was fixed in app version 0.4.2, but existing servers were not updated. - This migration fixes this by changing the nixos config branch to master. - """ - - def is_migration_needed(self): - """Check the current branch of /etc/nixos and return True if it is rolling-testing""" - current_working_directory = os.getcwd() - try: - os.chdir("/etc/nixos") - nixos_config_branch = subprocess.check_output( - ["git", "rev-parse", "--abbrev-ref", "HEAD"], start_new_session=True - ) - os.chdir(current_working_directory) - return nixos_config_branch.decode("utf-8").strip() == "rolling-testing" - except subprocess.CalledProcessError: - os.chdir(current_working_directory) - return False - - def migrate(self): - """Affected server pulled the config with the --single-branch flag. - Git config remote.origin.fetch has to be changed, so all branches will be fetched. - Then, fetch all branches, pull and switch to master branch. 
- """ - print("Fixing Nixos config branch") - current_working_directory = os.getcwd() - try: - os.chdir("/etc/nixos") - - subprocess.check_output( - [ - "git", - "config", - "remote.origin.fetch", - "+refs/heads/*:refs/remotes/origin/*", - ] - ) - subprocess.check_output(["git", "fetch", "--all"]) - subprocess.check_output(["git", "pull"]) - subprocess.check_output(["git", "checkout", "master"]) - os.chdir(current_working_directory) - print("Done") - except subprocess.CalledProcessError: - os.chdir(current_working_directory) - print("Error") diff --git a/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py b/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py deleted file mode 100644 index 9bfd670..0000000 --- a/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py +++ /dev/null @@ -1,49 +0,0 @@ -import os -import subprocess - -from selfprivacy_api.migrations.migration import Migration - - -class MigrateToSelfprivacyChannel(Migration): - """Migrate to selfprivacy Nix channel.""" - - def get_migration_name(self): - return "migrate_to_selfprivacy_channel" - - def get_migration_description(self): - return "Migrate to selfprivacy Nix channel." - - def is_migration_needed(self): - try: - output = subprocess.check_output( - ["nix-channel", "--list"], start_new_session=True - ) - output = output.decode("utf-8") - first_line = output.split("\n", maxsplit=1)[0] - return first_line.startswith("nixos") and ( - first_line.endswith("nixos-21.11") or first_line.endswith("nixos-21.05") - ) - except subprocess.CalledProcessError: - return False - - def migrate(self): - # Change the channel and update them. 
- # Also, go to /etc/nixos directory and make a git pull - current_working_directory = os.getcwd() - try: - print("Changing channel") - os.chdir("/etc/nixos") - subprocess.check_output( - [ - "nix-channel", - "--add", - "https://channel.selfprivacy.org/nixos-selfpricacy", - "nixos", - ] - ) - subprocess.check_output(["nix-channel", "--update"]) - subprocess.check_output(["git", "pull"]) - os.chdir(current_working_directory) - except subprocess.CalledProcessError: - os.chdir(current_working_directory) - print("Error") diff --git a/selfprivacy_api/migrations/modules_in_json.py b/selfprivacy_api/migrations/modules_in_json.py deleted file mode 100644 index 64ba7d3..0000000 --- a/selfprivacy_api/migrations/modules_in_json.py +++ /dev/null @@ -1,50 +0,0 @@ -from selfprivacy_api.migrations.migration import Migration -from selfprivacy_api.utils import ReadUserData, WriteUserData -from selfprivacy_api.services import get_all_services - - -def migrate_services_to_modules(): - with WriteUserData() as userdata: - if "modules" not in userdata.keys(): - userdata["modules"] = {} - - for service in get_all_services(): - name = service.get_id() - if name in userdata.keys(): - field_content = userdata[name] - userdata["modules"][name] = field_content - del userdata[name] - - -# If you ever want to get rid of modules field you will need to get rid of this migration -class CreateModulesField(Migration): - """introduce 'modules' (services) into userdata""" - - def get_migration_name(self): - return "modules_in_json" - - def get_migration_description(self): - return "Group service settings into a 'modules' field in userdata.json" - - def is_migration_needed(self) -> bool: - try: - with ReadUserData() as userdata: - for service in get_all_services(): - if service.get_id() in userdata.keys(): - return True - - if "modules" not in userdata.keys(): - return True - return False - except Exception as e: - print(e) - return False - - def migrate(self): - # Write info about providers to 
userdata.json - try: - migrate_services_to_modules() - print("Done") - except Exception as e: - print(e) - print("Error migrating service fields") diff --git a/selfprivacy_api/migrations/mount_volume.py b/selfprivacy_api/migrations/mount_volume.py deleted file mode 100644 index 27fba83..0000000 --- a/selfprivacy_api/migrations/mount_volume.py +++ /dev/null @@ -1,51 +0,0 @@ -import os -import subprocess - -from selfprivacy_api.migrations.migration import Migration -from selfprivacy_api.utils import ReadUserData, WriteUserData -from selfprivacy_api.utils.block_devices import BlockDevices - - -class MountVolume(Migration): - """Mount volume.""" - - def get_migration_name(self): - return "mount_volume" - - def get_migration_description(self): - return "Mount volume if it is not mounted." - - def is_migration_needed(self): - try: - with ReadUserData() as userdata: - return "volumes" not in userdata - except Exception as e: - print(e) - return False - - def migrate(self): - # Get info about existing volumes - # Write info about volumes to userdata.json - try: - volumes = BlockDevices().get_block_devices() - # If there is an unmounted volume sdb, - # Write it to userdata.json - is_there_a_volume = False - for volume in volumes: - if volume.name == "sdb": - is_there_a_volume = True - break - with WriteUserData() as userdata: - userdata["volumes"] = [] - if is_there_a_volume: - userdata["volumes"].append( - { - "device": "/dev/sdb", - "mountPoint": "/volumes/sdb", - "fsType": "ext4", - } - ) - print("Done") - except Exception as e: - print(e) - print("Error mounting volume") diff --git a/selfprivacy_api/migrations/prepare_for_nixos_2211.py b/selfprivacy_api/migrations/prepare_for_nixos_2211.py deleted file mode 100644 index 849c262..0000000 --- a/selfprivacy_api/migrations/prepare_for_nixos_2211.py +++ /dev/null @@ -1,58 +0,0 @@ -import os -import subprocess - -from selfprivacy_api.migrations.migration import Migration - - -class 
MigrateToSelfprivacyChannelFrom2205(Migration): - """Migrate to selfprivacy Nix channel. - For some reason NixOS 22.05 servers initialized with the nixos channel instead of selfprivacy. - This stops us from upgrading to NixOS 22.11 - """ - - def get_migration_name(self): - return "migrate_to_selfprivacy_channel_from_2205" - - def get_migration_description(self): - return "Migrate to selfprivacy Nix channel from NixOS 22.05." - - def is_migration_needed(self): - try: - output = subprocess.check_output( - ["nix-channel", "--list"], start_new_session=True - ) - output = output.decode("utf-8") - first_line = output.split("\n", maxsplit=1)[0] - return first_line.startswith("nixos") and ( - first_line.endswith("nixos-22.05") - ) - except subprocess.CalledProcessError: - return False - - def migrate(self): - # Change the channel and update them. - # Also, go to /etc/nixos directory and make a git pull - current_working_directory = os.getcwd() - try: - print("Changing channel") - os.chdir("/etc/nixos") - subprocess.check_output( - [ - "nix-channel", - "--add", - "https://channel.selfprivacy.org/nixos-selfpricacy", - "nixos", - ] - ) - subprocess.check_output(["nix-channel", "--update"]) - nixos_config_branch = subprocess.check_output( - ["git", "rev-parse", "--abbrev-ref", "HEAD"], start_new_session=True - ) - if nixos_config_branch.decode("utf-8").strip() == "api-redis": - print("Also changing nixos-config branch from api-redis to master") - subprocess.check_output(["git", "checkout", "master"]) - subprocess.check_output(["git", "pull"]) - os.chdir(current_working_directory) - except subprocess.CalledProcessError: - os.chdir(current_working_directory) - print("Error") diff --git a/selfprivacy_api/migrations/prepare_for_nixos_2305.py b/selfprivacy_api/migrations/prepare_for_nixos_2305.py deleted file mode 100644 index d9fed28..0000000 --- a/selfprivacy_api/migrations/prepare_for_nixos_2305.py +++ /dev/null @@ -1,58 +0,0 @@ -import os -import subprocess - -from 
selfprivacy_api.migrations.migration import Migration - - -class MigrateToSelfprivacyChannelFrom2211(Migration): - """Migrate to selfprivacy Nix channel. - For some reason NixOS 22.11 servers initialized with the nixos channel instead of selfprivacy. - This stops us from upgrading to NixOS 23.05 - """ - - def get_migration_name(self): - return "migrate_to_selfprivacy_channel_from_2211" - - def get_migration_description(self): - return "Migrate to selfprivacy Nix channel from NixOS 22.11." - - def is_migration_needed(self): - try: - output = subprocess.check_output( - ["nix-channel", "--list"], start_new_session=True - ) - output = output.decode("utf-8") - first_line = output.split("\n", maxsplit=1)[0] - return first_line.startswith("nixos") and ( - first_line.endswith("nixos-22.11") - ) - except subprocess.CalledProcessError: - return False - - def migrate(self): - # Change the channel and update them. - # Also, go to /etc/nixos directory and make a git pull - current_working_directory = os.getcwd() - try: - print("Changing channel") - os.chdir("/etc/nixos") - subprocess.check_output( - [ - "nix-channel", - "--add", - "https://channel.selfprivacy.org/nixos-selfpricacy", - "nixos", - ] - ) - subprocess.check_output(["nix-channel", "--update"]) - nixos_config_branch = subprocess.check_output( - ["git", "rev-parse", "--abbrev-ref", "HEAD"], start_new_session=True - ) - if nixos_config_branch.decode("utf-8").strip() == "api-redis": - print("Also changing nixos-config branch from api-redis to master") - subprocess.check_output(["git", "checkout", "master"]) - subprocess.check_output(["git", "pull"]) - os.chdir(current_working_directory) - except subprocess.CalledProcessError: - os.chdir(current_working_directory) - print("Error") diff --git a/selfprivacy_api/migrations/providers.py b/selfprivacy_api/migrations/providers.py deleted file mode 100644 index 2cd5d5e..0000000 --- a/selfprivacy_api/migrations/providers.py +++ /dev/null @@ -1,43 +0,0 @@ -from 
selfprivacy_api.migrations.migration import Migration -from selfprivacy_api.utils import ReadUserData, WriteUserData - - -class CreateProviderFields(Migration): - """Unhardcode providers""" - - def get_migration_name(self): - return "create_provider_fields" - - def get_migration_description(self): - return "Add DNS, backup and server provider fields to enable user to choose between different clouds and to make the deployment adapt to these preferences." - - def is_migration_needed(self): - try: - with ReadUserData() as userdata: - return "dns" not in userdata - except Exception as e: - print(e) - return False - - def migrate(self): - # Write info about providers to userdata.json - try: - with WriteUserData() as userdata: - userdata["dns"] = { - "provider": "CLOUDFLARE", - "apiKey": userdata["cloudflare"]["apiKey"], - } - userdata["server"] = { - "provider": "HETZNER", - } - userdata["backup"] = { - "provider": "BACKBLAZE", - "accountId": userdata["backblaze"]["accountId"], - "accountKey": userdata["backblaze"]["accountKey"], - "bucket": userdata["backblaze"]["bucket"], - } - - print("Done") - except Exception as e: - print(e) - print("Error migrating provider fields") diff --git a/selfprivacy_api/migrations/redis_tokens.py b/selfprivacy_api/migrations/redis_tokens.py deleted file mode 100644 index c5eea2f..0000000 --- a/selfprivacy_api/migrations/redis_tokens.py +++ /dev/null @@ -1,48 +0,0 @@ -from selfprivacy_api.migrations.migration import Migration - -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) -from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( - RedisTokensRepository, -) -from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( - AbstractTokensRepository, -) - - -class LoadTokensToRedis(Migration): - """Load Json tokens into Redis""" - - def get_migration_name(self): - return "load_tokens_to_redis" - - def get_migration_description(self): - return "Loads access 
tokens and recovery keys from legacy json file into redis token storage" - - def is_repo_empty(self, repo: AbstractTokensRepository) -> bool: - if repo.get_tokens() != []: - return False - if repo.get_recovery_key() is not None: - return False - return True - - def is_migration_needed(self): - try: - if not self.is_repo_empty(JsonTokensRepository()) and self.is_repo_empty( - RedisTokensRepository() - ): - return True - except Exception as e: - print(e) - return False - - def migrate(self): - # Write info about providers to userdata.json - try: - RedisTokensRepository().clone(JsonTokensRepository()) - - print("Done") - except Exception as e: - print(e) - print("Error migrating access tokens from json to redis") diff --git a/selfprivacy_api/migrations/write_token_to_redis.py b/selfprivacy_api/migrations/write_token_to_redis.py new file mode 100644 index 0000000..aab4f72 --- /dev/null +++ b/selfprivacy_api/migrations/write_token_to_redis.py @@ -0,0 +1,63 @@ +from datetime import datetime +from typing import Optional +from selfprivacy_api.migrations.migration import Migration +from selfprivacy_api.models.tokens.token import Token + +from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( + RedisTokensRepository, +) +from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( + AbstractTokensRepository, +) +from selfprivacy_api.utils import ReadUserData, UserDataFiles + + +class WriteTokenToRedis(Migration): + """Load Json tokens into Redis""" + + def get_migration_name(self): + return "write_token_to_redis" + + def get_migration_description(self): + return "Loads the initial token into redis token storage" + + def is_repo_empty(self, repo: AbstractTokensRepository) -> bool: + if repo.get_tokens() != []: + return False + return True + + def get_token_from_json(self) -> Optional[Token]: + try: + with ReadUserData(UserDataFiles.SECRETS) as userdata: + return Token( + token=userdata["api"]["token"], + device_name="Initial device", + 
created_at=datetime.now(), + ) + except Exception as e: + print(e) + return None + + def is_migration_needed(self): + try: + if self.get_token_from_json() is not None and self.is_repo_empty( + RedisTokensRepository() + ): + return True + except Exception as e: + print(e) + return False + + def migrate(self): + # Write info about providers to userdata.json + try: + token = self.get_token_from_json() + if token is None: + print("No token found in secrets.json") + return + RedisTokensRepository()._store_token(token) + + print("Done") + except Exception as e: + print(e) + print("Error migrating access tokens from json to redis") diff --git a/selfprivacy_api/repositories/tokens/__init__.py b/selfprivacy_api/repositories/tokens/__init__.py index 9941bdc..e69de29 100644 --- a/selfprivacy_api/repositories/tokens/__init__.py +++ b/selfprivacy_api/repositories/tokens/__init__.py @@ -1,8 +0,0 @@ -from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( - AbstractTokensRepository, -) -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) - -repository = JsonTokensRepository() diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py deleted file mode 100644 index be753ea..0000000 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ /dev/null @@ -1,153 +0,0 @@ -""" -temporary legacy -""" -from typing import Optional -from datetime import datetime, timezone - -from selfprivacy_api.utils import UserDataFiles, WriteUserData, ReadUserData -from selfprivacy_api.models.tokens.token import Token -from selfprivacy_api.models.tokens.recovery_key import RecoveryKey -from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey -from selfprivacy_api.repositories.tokens.exceptions import ( - TokenNotFound, -) -from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( - AbstractTokensRepository, -) - 
- -DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" - - -class JsonTokensRepository(AbstractTokensRepository): - def get_tokens(self) -> list[Token]: - """Get the tokens""" - tokens_list = [] - - with ReadUserData(UserDataFiles.TOKENS) as tokens_file: - for userdata_token in tokens_file["tokens"]: - tokens_list.append( - Token( - token=userdata_token["token"], - device_name=userdata_token["name"], - created_at=userdata_token["date"], - ) - ) - - return tokens_list - - def _store_token(self, new_token: Token): - """Store a token directly""" - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - tokens_file["tokens"].append( - { - "token": new_token.token, - "name": new_token.device_name, - "date": new_token.created_at.strftime(DATETIME_FORMAT), - } - ) - - def delete_token(self, input_token: Token) -> None: - """Delete the token""" - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - for userdata_token in tokens_file["tokens"]: - if userdata_token["token"] == input_token.token: - tokens_file["tokens"].remove(userdata_token) - return - - raise TokenNotFound("Token not found!") - - def __key_date_from_str(self, date_string: str) -> datetime: - if date_string is None or date_string == "": - return None - # we assume that we store dates in json as naive utc - utc_no_tz = datetime.fromisoformat(date_string) - utc_with_tz = utc_no_tz.replace(tzinfo=timezone.utc) - return utc_with_tz - - def __date_from_tokens_file( - self, tokens_file: object, tokenfield: str, datefield: str - ): - date_string = tokens_file[tokenfield].get(datefield) - return self.__key_date_from_str(date_string) - - def get_recovery_key(self) -> Optional[RecoveryKey]: - """Get the recovery key""" - with ReadUserData(UserDataFiles.TOKENS) as tokens_file: - - if ( - "recovery_token" not in tokens_file - or tokens_file["recovery_token"] is None - ): - return - - recovery_key = RecoveryKey( - key=tokens_file["recovery_token"].get("token"), - created_at=self.__date_from_tokens_file( - tokens_file, 
"recovery_token", "date" - ), - expires_at=self.__date_from_tokens_file( - tokens_file, "recovery_token", "expiration" - ), - uses_left=tokens_file["recovery_token"].get("uses_left"), - ) - - return recovery_key - - def _store_recovery_key(self, recovery_key: RecoveryKey) -> None: - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - key_expiration: Optional[str] = None - if recovery_key.expires_at is not None: - key_expiration = recovery_key.expires_at.strftime(DATETIME_FORMAT) - tokens_file["recovery_token"] = { - "token": recovery_key.key, - "date": recovery_key.created_at.strftime(DATETIME_FORMAT), - "expiration": key_expiration, - "uses_left": recovery_key.uses_left, - } - - def _decrement_recovery_token(self): - """Decrement recovery key use count by one""" - if self.is_recovery_key_valid(): - with WriteUserData(UserDataFiles.TOKENS) as tokens: - if tokens["recovery_token"]["uses_left"] is not None: - tokens["recovery_token"]["uses_left"] -= 1 - - def _delete_recovery_key(self) -> None: - """Delete the recovery key""" - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - if "recovery_token" in tokens_file: - del tokens_file["recovery_token"] - return - - def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None: - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - tokens_file["new_device"] = { - "token": new_device_key.key, - "date": new_device_key.created_at.strftime(DATETIME_FORMAT), - "expiration": new_device_key.expires_at.strftime(DATETIME_FORMAT), - } - - def delete_new_device_key(self) -> None: - """Delete the new device key""" - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - if "new_device" in tokens_file: - del tokens_file["new_device"] - return - - def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]: - """Retrieves new device key that is already stored.""" - with ReadUserData(UserDataFiles.TOKENS) as tokens_file: - if "new_device" not in tokens_file or tokens_file["new_device"] is None: - 
return - - new_device_key = NewDeviceKey( - key=tokens_file["new_device"]["token"], - created_at=self.__date_from_tokens_file( - tokens_file, "new_device", "date" - ), - expires_at=self.__date_from_tokens_file( - tokens_file, "new_device", "expiration" - ), - ) - return new_device_key diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py index 50ef76a..dd0a5b4 100644 --- a/selfprivacy_api/services/__init__.py +++ b/selfprivacy_api/services/__init__.py @@ -3,7 +3,7 @@ import typing from selfprivacy_api.services.bitwarden import Bitwarden from selfprivacy_api.services.gitea import Gitea -from selfprivacy_api.services.jitsi import Jitsi +from selfprivacy_api.services.jitsimeet import JitsiMeet from selfprivacy_api.services.mailserver import MailServer from selfprivacy_api.services.nextcloud import Nextcloud from selfprivacy_api.services.pleroma import Pleroma @@ -18,7 +18,7 @@ services: list[Service] = [ Nextcloud(), Pleroma(), Ocserv(), - Jitsi(), + JitsiMeet(), ] diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index cfb0385..819b48e 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -244,9 +244,11 @@ def move_service( progress=95, ) with WriteUserData() as user_data: - if userdata_location not in user_data: - user_data[userdata_location] = {} - user_data[userdata_location]["location"] = volume.name + if "modules" not in user_data: + user_data["modules"] = {} + if userdata_location not in user_data["modules"]: + user_data["modules"][userdata_location] = {} + user_data["modules"][userdata_location]["location"] = volume.name # Start service service.start() Jobs.update( diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsimeet/__init__.py similarity index 90% rename from selfprivacy_api/services/jitsi/__init__.py rename to 
selfprivacy_api/services/jitsimeet/__init__.py index d5677cc..30663f9 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsimeet/__init__.py @@ -1,4 +1,4 @@ -"""Class representing Jitsi service""" +"""Class representing Jitsi Meet service""" import base64 import subprocess import typing @@ -11,26 +11,26 @@ from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceS from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice import selfprivacy_api.utils.network as network_utils -from selfprivacy_api.services.jitsi.icon import JITSI_ICON +from selfprivacy_api.services.jitsimeet.icon import JITSI_ICON -class Jitsi(Service): +class JitsiMeet(Service): """Class representing Jitsi service""" @staticmethod def get_id() -> str: """Return service id.""" - return "jitsi" + return "jitsi-meet" @staticmethod def get_display_name() -> str: """Return service display name.""" - return "Jitsi" + return "JitsiMeet" @staticmethod def get_description() -> str: """Return service description.""" - return "Jitsi is a free and open-source video conferencing solution." + return "Jitsi Meet is a free and open-source video conferencing solution." 
@staticmethod def get_svg_icon() -> str: @@ -123,4 +123,4 @@ class Jitsi(Service): ] def move_to_volume(self, volume: BlockDevice) -> Job: - raise NotImplementedError("jitsi service is not movable") + raise NotImplementedError("jitsi-meet service is not movable") diff --git a/selfprivacy_api/services/jitsi/icon.py b/selfprivacy_api/services/jitsimeet/icon.py similarity index 100% rename from selfprivacy_api/services/jitsi/icon.py rename to selfprivacy_api/services/jitsimeet/icon.py diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index e36a694..536b444 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -21,7 +21,7 @@ class MailServer(Service): @staticmethod def get_id() -> str: - return "email" + return "simple-nixos-mailserver" @staticmethod def get_display_name() -> str: @@ -173,7 +173,7 @@ class MailServer(Service): volume, job, FolderMoveNames.default_foldermoves(self), - "email", + "simple-nixos-mailserver", ) return job diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index b44f3a9..f41c821 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -225,9 +225,13 @@ class Service(ABC): return root_device with utils.ReadUserData() as userdata: if userdata.get("useBinds", False): - return userdata.get(cls.get_id(), {}).get( - "location", - root_device, + return ( + userdata.get("modules", {}) + .get(cls.get_id(), {}) + .get( + "location", + root_device, + ) ) else: return root_device diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 08bc61f..779bdf6 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -9,10 +9,8 @@ import portalocker import typing -USERDATA_FILE = "/etc/nixos/userdata/userdata.json" -TOKENS_FILE = "/etc/nixos/userdata/tokens.json" -JOBS_FILE = 
"/etc/nixos/userdata/jobs.json" -DOMAIN_FILE = "/var/domain" +USERDATA_FILE = "/etc/nixos/userdata.json" +SECRETS_FILE = "/etc/selfprivacy/secrets.json" DKIM_DIR = "/var/dkim/" @@ -20,15 +18,13 @@ class UserDataFiles(Enum): """Enum for userdata files""" USERDATA = 0 - TOKENS = 1 - JOBS = 2 + SECRETS = 3 def get_domain(): - """Get domain from /var/domain without trailing new line""" - with open(DOMAIN_FILE, "r", encoding="utf-8") as domain_file: - domain = domain_file.readline().rstrip() - return domain + """Get domain from userdata.json""" + with ReadUserData() as user_data: + return user_data["domain"] class WriteUserData(object): @@ -37,14 +33,12 @@ class WriteUserData(object): def __init__(self, file_type=UserDataFiles.USERDATA): if file_type == UserDataFiles.USERDATA: self.userdata_file = open(USERDATA_FILE, "r+", encoding="utf-8") - elif file_type == UserDataFiles.TOKENS: - self.userdata_file = open(TOKENS_FILE, "r+", encoding="utf-8") - elif file_type == UserDataFiles.JOBS: + elif file_type == UserDataFiles.SECRETS: # Make sure file exists - if not os.path.exists(JOBS_FILE): - with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: - jobs_file.write("{}") - self.userdata_file = open(JOBS_FILE, "r+", encoding="utf-8") + if not os.path.exists(SECRETS_FILE): + with open(SECRETS_FILE, "w", encoding="utf-8") as secrets_file: + secrets_file.write("{}") + self.userdata_file = open(SECRETS_FILE, "r+", encoding="utf-8") else: raise ValueError("Unknown file type") portalocker.lock(self.userdata_file, portalocker.LOCK_EX) @@ -68,14 +62,11 @@ class ReadUserData(object): def __init__(self, file_type=UserDataFiles.USERDATA): if file_type == UserDataFiles.USERDATA: self.userdata_file = open(USERDATA_FILE, "r", encoding="utf-8") - elif file_type == UserDataFiles.TOKENS: - self.userdata_file = open(TOKENS_FILE, "r", encoding="utf-8") - elif file_type == UserDataFiles.JOBS: - # Make sure file exists - if not os.path.exists(JOBS_FILE): - with open(JOBS_FILE, "w", 
encoding="utf-8") as jobs_file: - jobs_file.write("{}") - self.userdata_file = open(JOBS_FILE, "r", encoding="utf-8") + elif file_type == UserDataFiles.SECRETS: + if not os.path.exists(SECRETS_FILE): + with open(SECRETS_FILE, "w", encoding="utf-8") as secrets_file: + secrets_file.write("{}") + self.userdata_file = open(SECRETS_FILE, "r", encoding="utf-8") else: raise ValueError("Unknown file type") portalocker.lock(self.userdata_file, portalocker.LOCK_SH) diff --git a/selfprivacy_api/utils/huey.py b/selfprivacy_api/utils/huey.py index a7ff492..8e09446 100644 --- a/selfprivacy_api/utils/huey.py +++ b/selfprivacy_api/utils/huey.py @@ -2,14 +2,15 @@ import os from huey import SqliteHuey -HUEY_DATABASE = "/etc/nixos/userdata/tasks.db" +HUEY_DATABASE = "/etc/selfprivacy/tasks.db" # Singleton instance containing the huey database. test_mode = os.environ.get("TEST_MODE") huey = SqliteHuey( - HUEY_DATABASE, + "selfprivacy-api", + filename=HUEY_DATABASE if not test_mode else None, immediate=test_mode == "true", utc=True, ) diff --git a/tests/common.py b/tests/common.py index 8061721..ae3f0d0 100644 --- a/tests/common.py +++ b/tests/common.py @@ -7,28 +7,28 @@ RECOVERY_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.time.datetime" DEVICE_KEY_VALIDATION_DATETIME = RECOVERY_KEY_VALIDATION_DATETIME -def five_minutes_into_future_naive(): - return datetime.now() + timedelta(minutes=5) +def ten_minutes_into_future_naive(): + return datetime.now() + timedelta(minutes=10) -def five_minutes_into_future_naive_utc(): - return datetime.utcnow() + timedelta(minutes=5) +def ten_minutes_into_future_naive_utc(): + return datetime.utcnow() + timedelta(minutes=10) -def five_minutes_into_future(): - return datetime.now(timezone.utc) + timedelta(minutes=5) +def ten_minutes_into_future(): + return datetime.now(timezone.utc) + timedelta(minutes=10) -def five_minutes_into_past_naive(): - return datetime.now() - timedelta(minutes=5) +def ten_minutes_into_past_naive(): + return 
datetime.now() - timedelta(minutes=10) -def five_minutes_into_past_naive_utc(): - return datetime.utcnow() - timedelta(minutes=5) +def ten_minutes_into_past_naive_utc(): + return datetime.utcnow() - timedelta(minutes=10) -def five_minutes_into_past(): - return datetime.now(timezone.utc) - timedelta(minutes=5) +def ten_minutes_into_past(): + return datetime.now(timezone.utc) - timedelta(minutes=10) class NearFuture(datetime): diff --git a/tests/conftest.py b/tests/conftest.py index fddd32f..e651c08 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -9,6 +9,7 @@ from os import path from os import makedirs from typing import Generator from fastapi.testclient import TestClient +from selfprivacy_api.models.tokens.token import Token from selfprivacy_api.utils.huey import huey @@ -16,22 +17,14 @@ import selfprivacy_api.services as services from selfprivacy_api.services import get_service_by_id, Service from selfprivacy_api.services.test_service import DummyService -from selfprivacy_api.models.tokens.token import Token -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( RedisTokensRepository, ) -from tests.common import read_json TESTFILE_BODY = "testytest!" TESTFILE_2_BODY = "testissimo!" 
-EMPTY_TOKENS_JSON = ' {"tokens": []}' - - TOKENS_FILE_CONTENTS = { "tokens": [ { @@ -47,6 +40,19 @@ TOKENS_FILE_CONTENTS = { ] } +TOKENS = [ + Token( + token="TEST_TOKEN", + device_name="test_token", + created_at=datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), + ), + Token( + token="TEST_TOKEN2", + device_name="test_token2", + created_at=datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), + ), +] + DEVICE_WE_AUTH_TESTS_WITH = TOKENS_FILE_CONTENTS["tokens"][0] @@ -58,25 +64,6 @@ def global_data_dir(): return path.join(path.dirname(__file__), "data") -@pytest.fixture -def empty_tokens(mocker, tmpdir): - tokenfile = tmpdir / "empty_tokens.json" - with open(tokenfile, "w") as file: - file.write(EMPTY_TOKENS_JSON) - mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokenfile) - assert read_json(tokenfile)["tokens"] == [] - return tmpdir - - -@pytest.fixture -def empty_json_repo(empty_tokens): - repo = JsonTokensRepository() - for token in repo.get_tokens(): - repo.delete_token(token) - assert repo.get_tokens() == [] - return repo - - @pytest.fixture def empty_redis_repo(): repo = RedisTokensRepository() @@ -86,25 +73,14 @@ def empty_redis_repo(): @pytest.fixture -def tokens_file(empty_redis_repo, tmpdir): - """A state with tokens""" - repo = empty_redis_repo - for token in TOKENS_FILE_CONTENTS["tokens"]: - repo._store_token( - Token( - token=token["token"], - device_name=token["name"], - created_at=token["date"], - ) - ) - return repo - - -@pytest.fixture -def jobs_file(mocker, shared_datadir): - """Mock tokens file.""" - mock = mocker.patch("selfprivacy_api.utils.JOBS_FILE", shared_datadir / "jobs.json") - return mock +def redis_repo_with_tokens(): + repo = RedisTokensRepository() + repo.reset() + for token in TOKENS: + repo._store_token(token) + assert sorted(repo.get_tokens(), key=lambda x: x.token) == sorted( + TOKENS, key=lambda x: x.token + ) @pytest.fixture @@ -131,14 +107,14 @@ def huey_database(mocker, shared_datadir): @pytest.fixture -def 
client(tokens_file, huey_database, jobs_file): +def client(huey_database, redis_repo_with_tokens): from selfprivacy_api.app import app return TestClient(app) @pytest.fixture -def authorized_client(tokens_file, huey_database, jobs_file): +def authorized_client(huey_database, redis_repo_with_tokens): """Authorized test client fixture.""" from selfprivacy_api.app import app @@ -150,7 +126,7 @@ def authorized_client(tokens_file, huey_database, jobs_file): @pytest.fixture -def wrong_auth_client(tokens_file, huey_database, jobs_file): +def wrong_auth_client(huey_database, redis_repo_with_tokens): """Wrong token test client fixture.""" from selfprivacy_api.app import app diff --git a/tests/data/jobs.json b/tests/data/jobs.json deleted file mode 100644 index 0967ef4..0000000 --- a/tests/data/jobs.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/data/tokens.json b/tests/data/tokens.json deleted file mode 100644 index 9be9d02..0000000 --- a/tests/data/tokens.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314" - }, - { - "token": "TEST_TOKEN2", - "name": "test_token2", - "date": "2022-01-14 08:31:10.789314" - } - ] -} \ No newline at end of file diff --git a/tests/data/turned_on.json b/tests/data/turned_on.json index 1b6219d..badf57b 100644 --- a/tests/data/turned_on.json +++ b/tests/data/turned_on.json @@ -1,40 +1,20 @@ { - "api": {"token": "TEST_TOKEN", "enableSwagger": false}, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false + }, + "server": { + "provider": "HETZNER" + }, + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": ["ssh-ed25519 KEY test@pc"] - }, + "timezone": "Etc/UTC", "username": "tester", - "autoUpgrade": {"enable": true, "allowReboot": 
true}, "useBinds": true, - "timezone": "Europe/Moscow", - "sshKeys": ["ssh-rsa KEY test@pc"], - "dns": {"provider": "CLOUDFLARE", "apiKey": "TOKEN"}, - "server": {"provider": "HETZNER"}, - "modules": { - "bitwarden": {"enable": true}, - "gitea": {"enable": true}, - "ocserv": {"enable": true}, - "pleroma": {"enable": true}, - "jitsi": {"enable": true}, - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - } - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], "users": [ { "username": "user1", @@ -51,5 +31,57 @@ "hashedPassword": "HASHED_PASSWORD_3", "sshKeys": ["ssh-rsa KEY user3@pc"] } - ] + ], + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] + }, + "backup": { + "provider": "BACKBLAZE", + "accountId": "ID", + "accountKey": "KEY", + "bucket": "selfprivacy" + } } diff --git a/tests/test_block_device_utils.py b/tests/test_block_device_utils.py index 7a85c50..41c30c8 100644 --- a/tests/test_block_device_utils.py +++ b/tests/test_block_device_utils.py @@ -67,7 +67,7 @@ def only_root_in_userdata(mocker, datadir): read_json(datadir / "only_root.json")["volumes"][0]["mountPoint"] == "/volumes/sda1" ) - assert read_json(datadir / "only_root.json")["volumes"][0]["filesystem"] == "ext4" + assert 
read_json(datadir / "only_root.json")["volumes"][0]["fsType"] == "ext4" return datadir diff --git a/tests/test_block_device_utils/no_devices.json b/tests/test_block_device_utils/no_devices.json index c395b21..b23d99f 100644 --- a/tests/test_block_device_utils/no_devices.json +++ b/tests/test_block_device_utils/no_devices.json @@ -1,59 +1,59 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "resticPassword": "PASS", + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - 
"accountKey": "KEY", - "bucket": "selfprivacy" - }, - "volumes": [ - ] + } } diff --git a/tests/test_block_device_utils/only_root.json b/tests/test_block_device_utils/only_root.json index 1026ed0..ab4a196 100644 --- a/tests/test_block_device_utils/only_root.json +++ b/tests/test_block_device_utils/only_root.json @@ -1,64 +1,65 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "resticPassword": "PASS", + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [ + { + "device": "/dev/sda1", + "mountPoint": "/volumes/sda1", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "volumes": [ - { - "device": "/dev/sda1", - "mountPoint": "/volumes/sda1", - "filesystem": "ext4" - } - 
], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_block_device_utils/undefined.json b/tests/test_block_device_utils/undefined.json index f5edda8..21acd70 100644 --- a/tests/test_block_device_utils/undefined.json +++ b/tests/test_block_device_utils/undefined.json @@ -1,57 +1,58 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } }, - "resticPassword": "PASS", "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - 
}, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_dkim.py b/tests/test_dkim.py index 949bb19..0adf0a7 100644 --- a/tests/test_dkim.py +++ b/tests/test_dkim.py @@ -12,7 +12,7 @@ DKIM_FILE_CONTENT = b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p @pytest.fixture -def dkim_file(mocker, domain_file, tmpdir): +def dkim_file(mocker, tmpdir, generic_userdata): domain = get_domain() assert domain is not None assert domain != "" @@ -27,14 +27,6 @@ def dkim_file(mocker, domain_file, tmpdir): return dkim_path -@pytest.fixture -def domain_file(mocker): - # TODO: move to conftest. Challenge: it does not behave with "/" like pytest datadir does - domain_path = path.join(global_data_dir(), "domain") - mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", domain_path) - return domain_path - - @pytest.fixture def no_dkim_file(dkim_file): os.remove(dkim_file) @@ -45,7 +37,7 @@ def no_dkim_file(dkim_file): ############################################################################### -def test_get_dkim_key(domain_file, dkim_file): +def test_get_dkim_key(dkim_file): """Test DKIM key""" dkim_key = get_dkim_key("test-domain.tld") assert ( @@ -54,7 +46,7 @@ def test_get_dkim_key(domain_file, dkim_file): ) -def test_no_dkim_key(domain_file, no_dkim_file): +def test_no_dkim_key(no_dkim_file): """Test no DKIM key""" dkim_key = get_dkim_key("test-domain.tld") assert dkim_key is None diff --git a/tests/test_graphql/test_api.py b/tests/test_graphql/test_api.py index c252d44..af04685 100644 --- a/tests/test_graphql/test_api.py +++ b/tests/test_graphql/test_api.py @@ -9,7 +9,7 @@ from tests.test_graphql.test_api_recovery import API_RECOVERY_QUERY from tests.test_graphql.test_api_version import API_VERSION_QUERY -def test_graphql_get_entire_api_data(authorized_client, tokens_file): +def test_graphql_get_entire_api_data(authorized_client): response = 
authorized_client.post( "/graphql", json={ diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index ef77414..d521861 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -7,7 +7,7 @@ from tests.common import ( NearFuture, generate_api_query, ) -from tests.conftest import DEVICE_WE_AUTH_TESTS_WITH, TOKENS_FILE_CONTENTS +from tests.conftest import DEVICE_WE_AUTH_TESTS_WITH from tests.test_graphql.common import ( get_data, assert_empty, @@ -66,11 +66,11 @@ def graphql_authorize_new_device(client, mnemonic_key, device_name) -> str: return token -def test_graphql_tokens_info(authorized_client, tokens_file): +def test_graphql_tokens_info(authorized_client): assert_original(authorized_client) -def test_graphql_tokens_info_unauthorized(client, tokens_file): +def test_graphql_tokens_info_unauthorized(client): response = request_devices(client) assert_empty(response) @@ -88,7 +88,7 @@ mutation DeleteToken($device: String!) 
{ """ -def test_graphql_delete_token_unauthorized(client, tokens_file): +def test_graphql_delete_token_unauthorized(client): response = client.post( "/graphql", json={ @@ -101,7 +101,7 @@ def test_graphql_delete_token_unauthorized(client, tokens_file): assert_empty(response) -def test_graphql_delete_token(authorized_client, tokens_file): +def test_graphql_delete_token(authorized_client): test_devices = ORIGINAL_DEVICES.copy() device_to_delete = test_devices.pop(1) assert device_to_delete != DEVICE_WE_AUTH_TESTS_WITH @@ -121,7 +121,7 @@ def test_graphql_delete_token(authorized_client, tokens_file): assert_same(devices, test_devices) -def test_graphql_delete_self_token(authorized_client, tokens_file): +def test_graphql_delete_self_token(authorized_client): response = authorized_client.post( "/graphql", json={ @@ -137,7 +137,6 @@ def test_graphql_delete_self_token(authorized_client, tokens_file): def test_graphql_delete_nonexistent_token( authorized_client, - tokens_file, ): response = authorized_client.post( "/graphql", @@ -167,7 +166,7 @@ mutation RefreshToken { """ -def test_graphql_refresh_token_unauthorized(client, tokens_file): +def test_graphql_refresh_token_unauthorized(client): response = client.post( "/graphql", json={"query": REFRESH_TOKEN_MUTATION}, @@ -175,7 +174,7 @@ def test_graphql_refresh_token_unauthorized(client, tokens_file): assert_empty(response) -def test_graphql_refresh_token(authorized_client, client, tokens_file): +def test_graphql_refresh_token(authorized_client, client): caller_name_and_date = graphql_get_caller_token_info(authorized_client) response = authorized_client.post( "/graphql", @@ -206,7 +205,6 @@ mutation NewDeviceKey { def test_graphql_get_new_device_auth_key_unauthorized( client, - tokens_file, ): response = client.post( "/graphql", @@ -230,7 +228,6 @@ mutation InvalidateNewDeviceKey { def test_graphql_invalidate_new_device_token_unauthorized( client, - tokens_file, ): response = client.post( "/graphql", @@ -244,7 +241,7 @@ def 
test_graphql_invalidate_new_device_token_unauthorized( assert_empty(response) -def test_graphql_get_and_delete_new_device_key(client, authorized_client, tokens_file): +def test_graphql_get_and_delete_new_device_key(client, authorized_client): mnemonic_key = graphql_get_new_device_key(authorized_client) response = authorized_client.post( @@ -271,7 +268,7 @@ mutation AuthorizeWithNewDeviceKey($input: UseNewDeviceKeyInput!) { """ -def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_file): +def test_graphql_get_and_authorize_new_device(client, authorized_client): mnemonic_key = graphql_get_new_device_key(authorized_client) old_devices = graphql_get_devices(authorized_client) @@ -282,16 +279,14 @@ def test_graphql_get_and_authorize_new_device(client, authorized_client, tokens_ assert "new_device" in [device["name"] for device in new_devices] -def test_graphql_authorize_new_device_with_invalid_key( - client, authorized_client, tokens_file -): +def test_graphql_authorize_new_device_with_invalid_key(client, authorized_client): response = graphql_try_auth_new_device(client, "invalid_token", "new_device") assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404) assert_original(authorized_client) -def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_file): +def test_graphql_get_and_authorize_used_key(client, authorized_client): mnemonic_key = graphql_get_new_device_key(authorized_client) graphql_authorize_new_device(client, mnemonic_key, "new_device") @@ -304,7 +299,7 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi def test_graphql_get_and_authorize_key_after_12_minutes( - client, authorized_client, tokens_file, mocker + client, authorized_client, mocker ): mnemonic_key = graphql_get_new_device_key(authorized_client) mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, NearFuture) @@ -315,7 +310,6 @@ def test_graphql_get_and_authorize_key_after_12_minutes( def 
test_graphql_authorize_without_token( client, - tokens_file, ): response = client.post( "/graphql", diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py index f53394f..ea44640 100644 --- a/tests/test_graphql/test_api_recovery.py +++ b/tests/test_graphql/test_api_recovery.py @@ -14,9 +14,9 @@ from tests.common import ( ) # Graphql API's output should be timezone-naive -from tests.common import five_minutes_into_future_naive_utc as five_minutes_into_future -from tests.common import five_minutes_into_future as five_minutes_into_future_tz -from tests.common import five_minutes_into_past_naive_utc as five_minutes_into_past +from tests.common import ten_minutes_into_future_naive_utc as ten_minutes_into_future +from tests.common import ten_minutes_into_future as ten_minutes_into_future_tz +from tests.common import ten_minutes_into_past_naive_utc as ten_minutes_into_past from tests.test_graphql.common import ( assert_empty, @@ -111,12 +111,12 @@ def graphql_use_recovery_key(client, key, device_name): return token -def test_graphql_recovery_key_status_unauthorized(client, tokens_file): +def test_graphql_recovery_key_status_unauthorized(client): response = request_recovery_status(client) assert_empty(response) -def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_file): +def test_graphql_recovery_key_status_when_none_exists(authorized_client): status = graphql_recovery_status(authorized_client) assert status["exists"] is False assert status["valid"] is False @@ -152,7 +152,7 @@ mutation TestUseRecoveryKey($input: UseRecoveryKeyInput!) 
{ """ -def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): +def test_graphql_generate_recovery_key(client, authorized_client): key = graphql_make_new_recovery_key(authorized_client) status = graphql_recovery_status(authorized_client) @@ -168,10 +168,10 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file): @pytest.mark.parametrize( - "expiration_date", [five_minutes_into_future(), five_minutes_into_future_tz()] + "expiration_date", [ten_minutes_into_future(), ten_minutes_into_future_tz()] ) def test_graphql_generate_recovery_key_with_expiration_date( - client, authorized_client, tokens_file, expiration_date: datetime + client, authorized_client, expiration_date: datetime ): key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date) @@ -192,10 +192,8 @@ def test_graphql_generate_recovery_key_with_expiration_date( graphql_use_recovery_key(client, key, "new_test_token2") -def test_graphql_use_recovery_key_after_expiration( - client, authorized_client, tokens_file, mocker -): - expiration_date = five_minutes_into_future() +def test_graphql_use_recovery_key_after_expiration(client, authorized_client, mocker): + expiration_date = ten_minutes_into_future() key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date) # Timewarp to after it expires @@ -220,10 +218,8 @@ def test_graphql_use_recovery_key_after_expiration( assert status["usesLeft"] is None -def test_graphql_generate_recovery_key_with_expiration_in_the_past( - authorized_client, tokens_file -): - expiration_date = five_minutes_into_past() +def test_graphql_generate_recovery_key_with_expiration_in_the_past(authorized_client): + expiration_date = ten_minutes_into_past() response = request_make_new_recovery_key( authorized_client, expires_at=expiration_date ) @@ -235,9 +231,7 @@ def test_graphql_generate_recovery_key_with_expiration_in_the_past( assert graphql_recovery_status(authorized_client)["exists"] is False 
-def test_graphql_generate_recovery_key_with_invalid_time_format( - authorized_client, tokens_file -): +def test_graphql_generate_recovery_key_with_invalid_time_format(authorized_client): expiration_date = "invalid_time_format" expiration_date_str = expiration_date @@ -256,10 +250,7 @@ def test_graphql_generate_recovery_key_with_invalid_time_format( assert graphql_recovery_status(authorized_client)["exists"] is False -def test_graphql_generate_recovery_key_with_limited_uses( - authorized_client, client, tokens_file -): - +def test_graphql_generate_recovery_key_with_limited_uses(authorized_client, client): mnemonic_key = graphql_make_new_recovery_key(authorized_client, uses=2) status = graphql_recovery_status(authorized_client) @@ -292,9 +283,7 @@ def test_graphql_generate_recovery_key_with_limited_uses( assert_errorcode(output, 404) -def test_graphql_generate_recovery_key_with_negative_uses( - authorized_client, tokens_file -): +def test_graphql_generate_recovery_key_with_negative_uses(authorized_client): response = request_make_new_recovery_key(authorized_client, uses=-1) output = get_data(response)["api"]["getNewRecoveryApiKey"] @@ -303,7 +292,7 @@ def test_graphql_generate_recovery_key_with_negative_uses( assert graphql_recovery_status(authorized_client)["exists"] is False -def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): +def test_graphql_generate_recovery_key_with_zero_uses(authorized_client): response = request_make_new_recovery_key(authorized_client, uses=0) output = get_data(response)["api"]["getNewRecoveryApiKey"] diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py index 1c1374a..3983b56 100644 --- a/tests/test_graphql/test_services.py +++ b/tests/test_graphql/test_services.py @@ -503,7 +503,7 @@ def test_move_same_volume(authorized_client, dummy_service): def test_mailservice_cannot_enable_disable(authorized_client): - mailservice = get_service_by_id("email") + mailservice = 
get_service_by_id("simple-nixos-mailserver") mutation_response = api_enable(authorized_client, mailservice) data = get_data(mutation_response)["services"]["enableService"] diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 2a2c259..945f105 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -308,7 +308,6 @@ original_settings = [ def test_graphql_readwrite_ssh_settings( authorized_client, some_users, settings, original_settings ): - # Userdata-related tests like undefined fields are in actions-level tests. output = api_set_ssh_settings_dict(authorized_client, original_settings) assert_includes(api_ssh_settings(authorized_client), output) @@ -334,7 +333,6 @@ forbidden_settings = [ def test_graphql_readwrite_ssh_settings_partial( authorized_client, some_users, settings, original_settings ): - output = api_set_ssh_settings_dict(authorized_client, original_settings) with pytest.raises(Exception): output = api_set_ssh_settings_dict(authorized_client, settings) diff --git a/tests/test_graphql/test_ssh/some_users.json b/tests/test_graphql/test_ssh/some_users.json index c02d216..b81513d 100644 --- a/tests/test_graphql/test_ssh/some_users.json +++ b/tests/test_graphql/test_ssh/some_users.json @@ -1,43 +1,17 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": false + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - 
"pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, "timezone": "Europe/Moscow", + "username": "tester", + "useBinds": true, "sshKeys": [ "ssh-rsa KEY test@pc" ], @@ -60,17 +34,50 @@ "hashedPassword": "HASHED_PASSWORD_3" } ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "server": { - "provider": "HETZNER" + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] } } diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index c318fe7..36a1cc1 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -9,18 +9,12 @@ from tests.test_graphql.common import assert_empty from tests.test_dkim import no_dkim_file, dkim_file -@pytest.fixture -def domain_file(mocker, datadir): - mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", datadir / "domain") - return datadir - - @pytest.fixture def turned_on(mocker, datadir): mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") assert read_json(datadir / "turned_on.json")["autoUpgrade"]["enable"] == True assert read_json(datadir / "turned_on.json")["autoUpgrade"]["allowReboot"] == True - assert read_json(datadir / "turned_on.json")["timezone"] == "Europe/Moscow" + 
assert read_json(datadir / "turned_on.json")["timezone"] == "Etc/UTC" return datadir @@ -29,7 +23,7 @@ def turned_off(mocker, datadir): mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") assert read_json(datadir / "turned_off.json")["autoUpgrade"]["enable"] == False assert read_json(datadir / "turned_off.json")["autoUpgrade"]["allowReboot"] == False - assert read_json(datadir / "turned_off.json")["timezone"] == "Europe/Moscow" + assert read_json(datadir / "turned_off.json")["timezone"] == "Etc/UTC" return datadir @@ -251,7 +245,7 @@ def is_dns_record_in_array(records, dns_record) -> bool: def test_graphql_get_domain( - authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on, mock_dkim_key + authorized_client, mock_get_ip4, mock_get_ip6, turned_on, mock_dkim_key ): """Test get domain""" response = authorized_client.post( @@ -262,7 +256,9 @@ def test_graphql_get_domain( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["system"]["domainInfo"]["domain"] == "test.tld" + assert ( + response.json()["data"]["system"]["domainInfo"]["domain"] == "test-domain.tld" + ) assert ( response.json()["data"]["system"]["domainInfo"]["hostname"] == "test-instance" ) @@ -335,7 +331,6 @@ def test_graphql_get_domain( def test_graphql_get_domain_no_dkim( authorized_client, - domain_file, mock_get_ip4, mock_get_ip6, no_dkim_file, @@ -384,7 +379,7 @@ def test_graphql_get_timezone(authorized_client, turned_on): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["system"]["settings"]["timezone"] == "Europe/Moscow" + assert response.json()["data"]["system"]["settings"]["timezone"] == "Etc/UTC" def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): @@ -397,9 +392,7 @@ def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): ) assert response.status_code == 200 assert 
response.json().get("data") is not None - assert ( - response.json()["data"]["system"]["settings"]["timezone"] == "Europe/Uzhgorod" - ) + assert response.json()["data"]["system"]["settings"]["timezone"] == "Etc/UTC" API_CHANGE_TIMEZONE_MUTATION = """ @@ -423,7 +416,7 @@ def test_graphql_change_timezone_unauthorized(client, turned_on): json={ "query": API_CHANGE_TIMEZONE_MUTATION, "variables": { - "timezone": "Europe/Moscow", + "timezone": "Etc/UTC", }, }, ) @@ -495,7 +488,7 @@ def test_graphql_change_timezone_without_timezone(authorized_client, turned_on): assert response.json()["data"]["system"]["changeTimezone"]["message"] is not None assert response.json()["data"]["system"]["changeTimezone"]["code"] == 400 assert response.json()["data"]["system"]["changeTimezone"]["timezone"] is None - assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" + assert read_json(turned_on / "turned_on.json")["timezone"] == "Etc/UTC" def test_graphql_change_timezone_with_invalid_timezone(authorized_client, turned_on): @@ -515,7 +508,7 @@ def test_graphql_change_timezone_with_invalid_timezone(authorized_client, turned assert response.json()["data"]["system"]["changeTimezone"]["message"] is not None assert response.json()["data"]["system"]["changeTimezone"]["code"] == 400 assert response.json()["data"]["system"]["changeTimezone"]["timezone"] is None - assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" + assert read_json(turned_on / "turned_on.json")["timezone"] == "Etc/UTC" API_GET_AUTO_UPGRADE_SETTINGS_QUERY = """ diff --git a/tests/test_graphql/test_system/domain b/tests/test_graphql/test_system/domain deleted file mode 100644 index 3679d0d..0000000 --- a/tests/test_graphql/test_system/domain +++ /dev/null @@ -1 +0,0 @@ -test-domain.tld \ No newline at end of file diff --git a/tests/test_graphql/test_system/no_values.json b/tests/test_graphql/test_system/no_values.json index 779691f..954790c 100644 --- 
a/tests/test_graphql/test_system/no_values.json +++ b/tests/test_graphql/test_system/no_values.json @@ -1,55 +1,62 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": {}, + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } }, - "resticPassword": "PASS", + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_system/turned_off.json b/tests/test_graphql/test_system/turned_off.json index 5fc287c..1453366 100644 --- 
a/tests/test_graphql/test_system/turned_off.json +++ b/tests/test_graphql/test_system/turned_off.json @@ -1,57 +1,65 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": { + "enable": false, + "allowReboot": false }, - "resticPassword": "PASS", + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": false, - "allowReboot": false - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_system/turned_on.json 
b/tests/test_graphql/test_system/turned_on.json index 240c6c9..2f31047 100644 --- a/tests/test_graphql/test_system/turned_on.json +++ b/tests/test_graphql/test_system/turned_on.json @@ -1,62 +1,65 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "server": { + "provider": "HETZNER" + }, + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "resticPassword": "PASS", + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "modules": { - "gitea": { - "enable": true - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "jitsi": { - "enable": true - }, - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "bitwarden": { - "enable": true - } - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - 
"accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_system/undefined.json b/tests/test_graphql/test_system/undefined.json index 2e31fea..89ad3ff 100644 --- a/tests/test_graphql/test_system/undefined.json +++ b/tests/test_graphql/test_system/undefined.json @@ -1,52 +1,60 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } }, - "resticPassword": "PASS", + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_system_nixos_tasks.py 
b/tests/test_graphql/test_system_nixos_tasks.py index 6052e9f..4a750c4 100644 --- a/tests/test_graphql/test_system_nixos_tasks.py +++ b/tests/test_graphql/test_system_nixos_tasks.py @@ -4,12 +4,6 @@ import pytest -@pytest.fixture -def domain_file(mocker, datadir): - mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", datadir / "domain") - return datadir - - class ProcessMock: """Mock subprocess.Popen""" diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index 99f5934..5c6e7e4 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -254,7 +254,6 @@ def test_graphql_get_one_user_unauthorized(client, one_user, mock_subprocess_pop def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( "/graphql", json={ @@ -275,7 +274,6 @@ def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen def test_graphql_get_some_user_undefined(authorized_client, undefined_settings): - response = authorized_client.post( "/graphql", json={ diff --git a/tests/test_graphql/test_users/no_users.json b/tests/test_graphql/test_users/no_users.json index a40fb88..2f31047 100644 --- a/tests/test_graphql/test_users/no_users.json +++ b/tests/test_graphql/test_users/no_users.json @@ -1,59 +1,65 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": false + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "resticPassword": 
"PASS", + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_users/one_user.json b/tests/test_graphql/test_users/one_user.json index 7e1cced..68f06f8 100644 --- a/tests/test_graphql/test_users/one_user.json +++ b/tests/test_graphql/test_users/one_user.json @@ -1,43 +1,17 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": false + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, + 
"timezone": "Etc/UTC", "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", + "useBinds": true, "sshKeys": [ "ssh-rsa KEY test@pc" ], @@ -50,17 +24,50 @@ ] } ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "server": { - "provider": "HETZNER" + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] } } diff --git a/tests/test_graphql/test_users/some_users.json b/tests/test_graphql/test_users/some_users.json index c02d216..3ad366e 100644 --- a/tests/test_graphql/test_users/some_users.json +++ b/tests/test_graphql/test_users/some_users.json @@ -1,43 +1,17 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": false + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": 
true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, + "timezone": "Etc/UTC", "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", + "useBinds": true, "sshKeys": [ "ssh-rsa KEY test@pc" ], @@ -60,17 +34,50 @@ "hashedPassword": "HASHED_PASSWORD_3" } ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "server": { - "provider": "HETZNER" + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] } } diff --git a/tests/test_graphql/test_users/undefined.json b/tests/test_graphql/test_users/undefined.json index ae9cd9e..26e3678 100644 --- a/tests/test_graphql/test_users/undefined.json +++ b/tests/test_graphql/test_users/undefined.json @@ -1,57 +1,64 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": false + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - 
"adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "resticPassword": "PASS", + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_migrations.py b/tests/test_migrations.py deleted file mode 100644 index 55f311a..0000000 --- a/tests/test_migrations.py +++ /dev/null @@ -1,60 +0,0 @@ -import pytest - -from selfprivacy_api.migrations.modules_in_json import CreateModulesField -from selfprivacy_api.utils import ReadUserData, WriteUserData -from selfprivacy_api.services import get_all_services - - -@pytest.fixture() -def stray_services(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "strays.json") - return datadir - - -@pytest.fixture() -def empty_json(generic_userdata): - with 
WriteUserData() as data: - data.clear() - - with ReadUserData() as data: - assert len(data.keys()) == 0 - - return - - -def test_modules_empty_json(empty_json): - with ReadUserData() as data: - assert "modules" not in data.keys() - - assert CreateModulesField().is_migration_needed() - - CreateModulesField().migrate() - assert not CreateModulesField().is_migration_needed() - - with ReadUserData() as data: - assert "modules" in data.keys() - - -@pytest.mark.parametrize("modules_field", [True, False]) -def test_modules_stray_services(modules_field, stray_services): - if not modules_field: - with WriteUserData() as data: - del data["modules"] - assert CreateModulesField().is_migration_needed() - - CreateModulesField().migrate() - - for service in get_all_services(): - # assumes we do not tolerate previous format - assert service.is_enabled() - if service.get_id() == "email": - continue - with ReadUserData() as data: - assert service.get_id() in data["modules"].keys() - assert service.get_id() not in data.keys() - - assert not CreateModulesField().is_migration_needed() - - -def test_modules_no_migration_on_generic_data(generic_userdata): - assert not CreateModulesField().is_migration_needed() diff --git a/tests/test_migrations/strays.json b/tests/test_migrations/strays.json deleted file mode 100644 index ee81350..0000000 --- a/tests/test_migrations/strays.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "bitwarden": { - "enable": true - }, - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "gitea": { - "enable": true - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "jitsi": { - "enable": true - }, - "modules": {} -} diff --git a/tests/test_repository/test_json_tokens_repository.py b/tests/test_repository/test_json_tokens_repository.py deleted file mode 100644 index 23df9df..0000000 --- a/tests/test_repository/test_json_tokens_repository.py +++ /dev/null @@ -1,245 +0,0 @@ -# pylint: 
disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=missing-function-docstring -""" -tests that restrict json token repository implementation -""" - -import pytest - - -from datetime import datetime - -from selfprivacy_api.models.tokens.token import Token -from selfprivacy_api.repositories.tokens.exceptions import ( - TokenNotFound, - RecoveryKeyNotFound, - NewDeviceKeyNotFound, -) -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) - -from tests.common import read_json -from test_tokens_repository import ( - mock_recovery_key_generate, - mock_generate_token, - mock_new_device_key_generate, -) - -ORIGINAL_TOKEN_CONTENT = [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698", - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z", - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z", - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698", - }, -] - -EMPTY_KEYS_JSON = """ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - } - ] -} -""" - - -@pytest.fixture -def tokens(mocker, datadir): - mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json") - assert read_json(datadir / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT - return datadir - - -@pytest.fixture -def empty_keys(mocker, tmpdir): - tokens_file = tmpdir / "empty_keys.json" - with open(tokens_file, "w") as file: - file.write(EMPTY_KEYS_JSON) - mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=tokens_file) - assert read_json(tokens_file)["tokens"] == [ - { - "token": 
"KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698", - } - ] - return tmpdir - - -@pytest.fixture -def null_keys(mocker, datadir): - mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json") - assert read_json(datadir / "null_keys.json")["recovery_token"] is None - assert read_json(datadir / "null_keys.json")["new_device"] is None - return datadir - - -def test_delete_token(tokens): - repo = JsonTokensRepository() - input_token = Token( - token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - device_name="primary_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) - - repo.delete_token(input_token) - assert read_json(tokens / "tokens.json")["tokens"] == [ - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z", - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z", - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698", - }, - ] - - -def test_delete_not_found_token(tokens): - repo = JsonTokensRepository() - input_token = Token( - token="imbadtoken", - device_name="primary_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) - with pytest.raises(TokenNotFound): - assert repo.delete_token(input_token) is None - - assert read_json(tokens / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT - - -def test_create_recovery_key(tokens, mock_recovery_key_generate): - repo = JsonTokensRepository() - - assert repo.create_recovery_key(uses_left=1, expiration=None) is not None - assert read_json(tokens / "tokens.json")["recovery_token"] == { - "token": "889bf49c1d3199d71a2e704718772bd53a422020334db051", - "date": "2022-07-15T17:41:31.675698", - "expiration": None, - "uses_left": 1, - } - - -def 
test_use_mnemonic_recovery_key_when_null(null_keys): - repo = JsonTokensRepository() - - with pytest.raises(RecoveryKeyNotFound): - assert ( - repo.use_mnemonic_recovery_key( - mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", - device_name="primary_token", - ) - is None - ) - - -def test_use_mnemonic_recovery_key(tokens, mock_generate_token): - repo = JsonTokensRepository() - - assert repo.use_mnemonic_recovery_key( - mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park", - device_name="newdevice", - ) == Token( - token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", - device_name="newdevice", - created_at=datetime(2022, 11, 14, 6, 6, 32, 777123), - ) - - assert read_json(tokens / "tokens.json")["tokens"] == [ - { - "date": "2022-07-15 17:41:31.675698", - "name": "primary_token", - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z", - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z", - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698", - }, - { - "date": "2022-11-14T06:06:32.777123", - "name": "newdevice", - "token": "ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", - }, - ] - assert read_json(tokens / "tokens.json")["recovery_token"] == { - "date": "2022-11-11T11:48:54.228038", - "expiration": None, - "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", - "uses_left": 1, - } - - -def test_get_new_device_key(tokens, mock_new_device_key_generate): - repo = JsonTokensRepository() - - assert repo.get_new_device_key() is not None - assert read_json(tokens / "tokens.json")["new_device"] == { - "date": "2022-07-15T17:41:31.675698", - 
"expiration": "2022-07-15T17:41:31.675698", - "token": "43478d05b35e4781598acd76e33832bb", - } - - -def test_delete_new_device_key(tokens): - repo = JsonTokensRepository() - - assert repo.delete_new_device_key() is None - assert "new_device" not in read_json(tokens / "tokens.json") - - -def test_delete_new_device_key_when_empty(empty_keys): - repo = JsonTokensRepository() - - repo.delete_new_device_key() - assert "new_device" not in read_json(empty_keys / "empty_keys.json") - - -def test_use_mnemonic_new_device_key_when_null(null_keys): - repo = JsonTokensRepository() - - with pytest.raises(NewDeviceKeyNotFound): - assert ( - repo.use_mnemonic_new_device_key( - device_name="imnew", - mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", - ) - is None - ) diff --git a/tests/test_repository/test_json_tokens_repository/empty_keys.json b/tests/test_repository/test_json_tokens_repository/empty_keys.json deleted file mode 100644 index 2131ddf..0000000 --- a/tests/test_repository/test_json_tokens_repository/empty_keys.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - } - ] -} diff --git a/tests/test_repository/test_json_tokens_repository/null_keys.json b/tests/test_repository/test_json_tokens_repository/null_keys.json deleted file mode 100644 index 45e6f90..0000000 --- a/tests/test_repository/test_json_tokens_repository/null_keys.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z" - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z" - }, - { - "token": 
"dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698" - } - ], - "recovery_token": null, - "new_device": null -} diff --git a/tests/test_repository/test_json_tokens_repository/tokens.json b/tests/test_repository/test_json_tokens_repository/tokens.json deleted file mode 100644 index bb1805c..0000000 --- a/tests/test_repository/test_json_tokens_repository/tokens.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z" - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z" - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698" - } - ], - "recovery_token": { - "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", - "date": "2022-11-11T11:48:54.228038", - "expiration": null, - "uses_left": 2 - }, - "new_device": { - "token": "2237238de23dc71ab558e317bdb8ff8e", - "date": "2022-10-26 20:50:47.973212", - "expiration": "2022-10-26 21:00:47.974153" - } -} diff --git a/tests/test_repository/test_tokens_repository.py b/tests/test_repository/test_tokens_repository.py index eb5e7cb..0ffc76b 100644 --- a/tests/test_repository/test_tokens_repository.py +++ b/tests/test_repository/test_tokens_repository.py @@ -17,9 +17,6 @@ from selfprivacy_api.repositories.tokens.exceptions import ( NewDeviceKeyNotFound, ) -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( RedisTokensRepository, ) @@ -27,7 +24,7 @@ from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( 
AbstractTokensRepository, ) -from tests.common import five_minutes_into_past, five_minutes_into_future +from tests.common import ten_minutes_into_past, ten_minutes_into_future ORIGINAL_DEVICE_NAMES = [ @@ -133,10 +130,8 @@ def mock_recovery_key_generate(mocker): return mock -@pytest.fixture(params=["json", "redis"]) -def empty_repo(request, empty_json_repo, empty_redis_repo): - if request.param == "json": - return empty_json_repo +@pytest.fixture(params=["redis"]) +def empty_repo(request, empty_redis_repo): if request.param == "redis": return empty_redis_repo # return empty_json_repo @@ -363,7 +358,7 @@ def test_use_mnemonic_expired_recovery_key( some_tokens_repo, ): repo = some_tokens_repo - expiration = five_minutes_into_past() + expiration = ten_minutes_into_past() assert repo.create_recovery_key(uses_left=2, expiration=expiration) is not None recovery_key = repo.get_recovery_key() # TODO: do not ignore timezone once json backend is deleted @@ -543,7 +538,7 @@ def test_use_mnemonic_expired_new_device_key( some_tokens_repo, ): repo = some_tokens_repo - expiration = five_minutes_into_past() + expiration = ten_minutes_into_past() key = repo.get_new_device_key() assert key is not None @@ -582,24 +577,3 @@ def assert_identical( assert token in tokens_b assert repo_a.get_recovery_key() == repo_b.get_recovery_key() assert repo_a._get_stored_new_device_key() == repo_b._get_stored_new_device_key() - - -def clone_to_redis(repo: JsonTokensRepository): - other_repo = RedisTokensRepository() - other_repo.clone(repo) - assert_identical(repo, other_repo) - - -# we cannot easily parametrize this unfortunately, since some_tokens and empty_repo cannot coexist -def test_clone_json_to_redis_empty(empty_repo): - repo = empty_repo - if isinstance(repo, JsonTokensRepository): - clone_to_redis(repo) - - -def test_clone_json_to_redis_full(some_tokens_repo): - repo = some_tokens_repo - if isinstance(repo, JsonTokensRepository): - repo.get_new_device_key() - 
repo.create_recovery_key(five_minutes_into_future(), 2) - clone_to_redis(repo) diff --git a/tests/test_repository/test_tokens_repository/empty_keys.json b/tests/test_repository/test_tokens_repository/empty_keys.json deleted file mode 100644 index 2131ddf..0000000 --- a/tests/test_repository/test_tokens_repository/empty_keys.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - } - ] -} diff --git a/tests/test_repository/test_tokens_repository/null_keys.json b/tests/test_repository/test_tokens_repository/null_keys.json deleted file mode 100644 index 45e6f90..0000000 --- a/tests/test_repository/test_tokens_repository/null_keys.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z" - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z" - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698" - } - ], - "recovery_token": null, - "new_device": null -} diff --git a/tests/test_repository/test_tokens_repository/tokens.json b/tests/test_repository/test_tokens_repository/tokens.json deleted file mode 100644 index bb1805c..0000000 --- a/tests/test_repository/test_tokens_repository/tokens.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z" - }, - { - "token": 
"LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z" - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698" - } - ], - "recovery_token": { - "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", - "date": "2022-11-11T11:48:54.228038", - "expiration": null, - "uses_left": 2 - }, - "new_device": { - "token": "2237238de23dc71ab558e317bdb8ff8e", - "date": "2022-10-26 20:50:47.973212", - "expiration": "2022-10-26 21:00:47.974153" - } -} diff --git a/tests/test_services.py b/tests/test_services.py index 65b4dc9..c5eff66 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -19,7 +19,7 @@ from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.services.service import Service, ServiceStatus, StoppedService from selfprivacy_api.services import get_enabled_services -from tests.test_dkim import domain_file, dkim_file, no_dkim_file +from tests.test_dkim import dkim_file, no_dkim_file def test_unimplemented_folders_raises(): diff --git a/tests/test_ssh.py b/tests/test_ssh.py index a688a63..2d0f70d 100644 --- a/tests/test_ssh.py +++ b/tests/test_ssh.py @@ -100,7 +100,7 @@ def test_read_json(possibly_undefined_ssh_settings): assert get_ssh_settings().enable == data["ssh"]["enable"] if "passwordAuthentication" not in data["ssh"].keys(): - assert get_ssh_settings().passwordAuthentication is True + assert get_ssh_settings().passwordAuthentication is False else: assert ( get_ssh_settings().passwordAuthentication @@ -111,7 +111,6 @@ def test_read_json(possibly_undefined_ssh_settings): def test_enabling_disabling_writes_json( possibly_undefined_ssh_settings, ssh_enable_spectrum, password_auth_spectrum ): - original_enable = get_raw_json_ssh_setting("enable") original_password_auth = get_raw_json_ssh_setting("passwordAuthentication") @@ -352,7 +351,6 @@ def test_read_user_keys_from_json(generic_userdata, 
username): @pytest.mark.parametrize("username", regular_users) def test_adding_user_key_writes_json(generic_userdata, username): - with WriteUserData() as data: user_index = find_user_index_in_json_users(data["users"], username) del data["users"][user_index]["sshKeys"] diff --git a/tests/test_users.py b/tests/test_users.py index 2f613db..3d7f38f 100644 --- a/tests/test_users.py +++ b/tests/test_users.py @@ -1,26 +1,27 @@ from selfprivacy_api.utils import ReadUserData, WriteUserData from selfprivacy_api.actions.users import delete_user + """ A place for user storage tests and other user tests that are not Graphql-specific. """ -# yes it is an incomplete suite. -# It was born in order to not lose things that REST API tests checked for +# yes it is an incomplete suite. +# It was born in order to not lose things that REST API tests checked for # In the future, user storage tests that are not dependent on actual API (graphql or otherwise) go here. + def test_delete_user_writes_json(generic_userdata): delete_user("user2") with ReadUserData() as data: assert data["users"] == [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": ["ssh-rsa KEY user1@pc"] - }, - { - "username": "user3", - "hashedPassword": "HASHED_PASSWORD_3", - "sshKeys": ["ssh-rsa KEY user3@pc"] - } - ] - + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": ["ssh-rsa KEY user1@pc"], + }, + { + "username": "user3", + "hashedPassword": "HASHED_PASSWORD_3", + "sshKeys": ["ssh-rsa KEY user3@pc"], + }, + ] From a0eb5d572f90e9558ebb2055f9133d7ab995e83d Mon Sep 17 00:00:00 2001 From: Inex Code Date: Tue, 9 Jan 2024 22:29:40 +0300 Subject: [PATCH 243/246] fix(graphql): Typing of the deprecated move_service function --- selfprivacy_api/graphql/mutations/deprecated_mutations.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/selfprivacy_api/graphql/mutations/deprecated_mutations.py b/selfprivacy_api/graphql/mutations/deprecated_mutations.py 
index 6d187c6..d9f3e3a 100644 --- a/selfprivacy_api/graphql/mutations/deprecated_mutations.py +++ b/selfprivacy_api/graphql/mutations/deprecated_mutations.py @@ -20,6 +20,7 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, ) from selfprivacy_api.graphql.mutations.services_mutations import ( + ServiceJobMutationReturn, ServiceMutationReturn, ServicesMutations, ) @@ -201,7 +202,7 @@ class DeprecatedServicesMutations: "services", ) - move_service: ServiceMutationReturn = deprecated_mutation( + move_service: ServiceJobMutationReturn = deprecated_mutation( ServicesMutations.move_service, "services", ) From e588bef59798acd2c79c2d2cd0c2991f6258fa9f Mon Sep 17 00:00:00 2001 From: Inex Code Date: Wed, 10 Jan 2024 17:17:36 +0300 Subject: [PATCH 244/246] docs: Add a note about how to use experimental nix features --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index 01ffd88..cf87eeb 100644 --- a/README.md +++ b/README.md @@ -54,6 +54,12 @@ tests/test_rest_endpoints/services/test_ssh.py ..................... ============================== 692 passed in 352.76s (0:05:52) =============================== ``` +If you don't have experimental flakes enabled, you can use the following command: + +```console +nix --extra-experimental-features nix-command --extra-experimental-features flakes develop +``` + ## dependencies and dependant modules Current flake inherits nixpkgs from NixOS configuration flake. So there is no need to refer to extra nixpkgs dependency if you want to be aligned with exact NixOS configuration. 
From 7ec62a8f79c865b7693a8a2c4fc7c27805dad6c3 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 12 Jan 2024 13:27:02 +0000 Subject: [PATCH 245/246] fix(backups): do not autobackup disabled services --- selfprivacy_api/backup/__init__.py | 10 +++++++--- tests/test_backup.py | 11 ++++++++++- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 66a4eac..0fa845e 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -731,11 +731,14 @@ class Backups: def is_time_to_backup_service(service: Service, time: datetime): """Returns True if it is time to back up a service""" period = Backups.autobackup_period_minutes() - if not service.can_be_backed_up(): - return False if period is None: return False + if not service.is_enabled(): + return False + if not service.can_be_backed_up(): + return False + last_error = Backups.get_last_backup_error_time(service) if last_error is not None: @@ -743,8 +746,9 @@ class Backups: return False last_backup = Backups.get_last_backed_up(service) + + # Queue a backup immediately if there are no previous backups if last_backup is None: - # queue a backup immediately if there are no previous backups return True if time > last_backup + timedelta(minutes=period): diff --git a/tests/test_backup.py b/tests/test_backup.py index 036dd42..646d9aa 100644 --- a/tests/test_backup.py +++ b/tests/test_backup.py @@ -889,7 +889,7 @@ def backuppable_services() -> list[Service]: return [service for service in get_all_services() if service.can_be_backed_up()] -def test_services_to_back_up(backups, dummy_service): +def test_services_to_autobackup(backups, dummy_service): backup_period = 13 # minutes now = datetime.now(timezone.utc) @@ -911,6 +911,15 @@ def test_services_to_back_up(backups, dummy_service): ] +def test_do_not_autobackup_disabled_services(backups, dummy_service): + now = datetime.now(timezone.utc) + 
Backups.set_autobackup_period_minutes(3) + assert Backups.is_time_to_backup_service(dummy_service, now) is True + + dummy_service.disable() + assert Backups.is_time_to_backup_service(dummy_service, now) is False + + def test_autobackup_timer_periods(backups, dummy_service): now = datetime.now(timezone.utc) backup_period = 13 # minutes From c38e066507723bd456a38d8fce8e348ffa9c86d7 Mon Sep 17 00:00:00 2001 From: Houkime <> Date: Fri, 12 Jan 2024 14:37:11 +0000 Subject: [PATCH 246/246] test(backups): separate autobackup tests to a file (was a bit of a mess) --- tests/test_autobackup.py | 538 +++++++++++++++++++++++++++++++++++++++ tests/test_backup.py | 513 ------------------------------------- 2 files changed, 538 insertions(+), 513 deletions(-) create mode 100644 tests/test_autobackup.py diff --git a/tests/test_autobackup.py b/tests/test_autobackup.py new file mode 100644 index 0000000..63c625f --- /dev/null +++ b/tests/test_autobackup.py @@ -0,0 +1,538 @@ +import pytest +from copy import copy + +from datetime import datetime, timezone, timedelta + +from selfprivacy_api.jobs import Jobs +from selfprivacy_api.services import Service, get_all_services + +from selfprivacy_api.graphql.common_types.backup import ( + BackupReason, + AutobackupQuotas, +) + +from selfprivacy_api.backup import Backups, Snapshot +from selfprivacy_api.backup.tasks import ( + prune_autobackup_snapshots, +) + +from tests.test_backup import backups + + +def backuppable_services() -> list[Service]: + return [service for service in get_all_services() if service.can_be_backed_up()] + + +def dummy_snapshot(date: datetime): + return Snapshot( + id=str(hash(date)), + service_name="someservice", + created_at=date, + reason=BackupReason.EXPLICIT, + ) + + +def test_no_default_autobackup(backups, dummy_service): + now = datetime.now(timezone.utc) + assert not Backups.is_time_to_backup_service(dummy_service, now) + assert not Backups.is_time_to_backup(now) + + +# --------------------- Timing 
------------------------- + + +def test_set_autobackup_period(backups): + assert Backups.autobackup_period_minutes() is None + + Backups.set_autobackup_period_minutes(2) + assert Backups.autobackup_period_minutes() == 2 + + Backups.disable_all_autobackup() + assert Backups.autobackup_period_minutes() is None + + Backups.set_autobackup_period_minutes(3) + assert Backups.autobackup_period_minutes() == 3 + + Backups.set_autobackup_period_minutes(0) + assert Backups.autobackup_period_minutes() is None + + Backups.set_autobackup_period_minutes(3) + assert Backups.autobackup_period_minutes() == 3 + + Backups.set_autobackup_period_minutes(-1) + assert Backups.autobackup_period_minutes() is None + + +def test_autobackup_timer_periods(backups, dummy_service): + now = datetime.now(timezone.utc) + backup_period = 13 # minutes + + assert not Backups.is_time_to_backup_service(dummy_service, now) + assert not Backups.is_time_to_backup(now) + + Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup_service(dummy_service, now) + assert Backups.is_time_to_backup(now) + + Backups.set_autobackup_period_minutes(0) + assert not Backups.is_time_to_backup_service(dummy_service, now) + assert not Backups.is_time_to_backup(now) + + +def test_autobackup_timer_enabling(backups, dummy_service): + now = datetime.now(timezone.utc) + backup_period = 13 # minutes + dummy_service.set_backuppable(False) + + Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup( + now + ) # there are other services too, not just our dummy + + # not backuppable service is not backuppable even if period is set + assert not Backups.is_time_to_backup_service(dummy_service, now) + + dummy_service.set_backuppable(True) + assert dummy_service.can_be_backed_up() + assert Backups.is_time_to_backup_service(dummy_service, now) + + Backups.disable_all_autobackup() + assert not Backups.is_time_to_backup_service(dummy_service, now) + assert not 
Backups.is_time_to_backup(now) + + +def test_autobackup_timing(backups, dummy_service): + backup_period = 13 # minutes + now = datetime.now(timezone.utc) + + Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup_service(dummy_service, now) + assert Backups.is_time_to_backup(now) + + Backups.back_up(dummy_service) + + now = datetime.now(timezone.utc) + assert not Backups.is_time_to_backup_service(dummy_service, now) + + past = datetime.now(timezone.utc) - timedelta(minutes=1) + assert not Backups.is_time_to_backup_service(dummy_service, past) + + future = datetime.now(timezone.utc) + timedelta(minutes=backup_period + 2) + assert Backups.is_time_to_backup_service(dummy_service, future) + + +# --------------------- What to autobackup and what not to -------------------- + + +def test_services_to_autobackup(backups, dummy_service): + backup_period = 13 # minutes + now = datetime.now(timezone.utc) + + dummy_service.set_backuppable(False) + services = Backups.services_to_back_up(now) + assert len(services) == 0 + + dummy_service.set_backuppable(True) + + services = Backups.services_to_back_up(now) + assert len(services) == 0 + + Backups.set_autobackup_period_minutes(backup_period) + + services = Backups.services_to_back_up(now) + assert len(services) == len(backuppable_services()) + assert dummy_service.get_id() in [ + service.get_id() for service in backuppable_services() + ] + + +def test_do_not_autobackup_disabled_services(backups, dummy_service): + now = datetime.now(timezone.utc) + Backups.set_autobackup_period_minutes(3) + assert Backups.is_time_to_backup_service(dummy_service, now) is True + + dummy_service.disable() + assert Backups.is_time_to_backup_service(dummy_service, now) is False + + +def test_failed_autoback_prevents_more_autobackup(backups, dummy_service): + backup_period = 13 # minutes + now = datetime.now(timezone.utc) + + Backups.set_autobackup_period_minutes(backup_period) + assert 
Backups.is_time_to_backup_service(dummy_service, now) + + # artificially making an errored out backup job + dummy_service.set_backuppable(False) + with pytest.raises(ValueError): + Backups.back_up(dummy_service) + dummy_service.set_backuppable(True) + + assert Backups.get_last_backed_up(dummy_service) is None + assert Backups.get_last_backup_error_time(dummy_service) is not None + + assert Backups.is_time_to_backup_service(dummy_service, now) is False + + +# --------------------- Quotas and Pruning ------------------------- + + +unlimited_quotas = AutobackupQuotas( + last=-1, + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, +) + +zero_quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, +) + +unlimited_quotas = AutobackupQuotas( + last=-1, + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, +) + +zero_quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, +) + + +def test_get_empty_quotas(backups): + quotas = Backups.autobackup_quotas() + assert quotas is not None + assert quotas == unlimited_quotas + + +def test_set_quotas(backups): + quotas = AutobackupQuotas( + last=3, + daily=2343, + weekly=343, + monthly=0, + yearly=-34556, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == AutobackupQuotas( + last=3, + daily=2343, + weekly=343, + monthly=0, + yearly=-1, + ) + + +def test_set_zero_quotas(backups): + quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == zero_quotas + + +def test_set_unlimited_quotas(backups): + quotas = AutobackupQuotas( + last=-1, + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == unlimited_quotas + + +def test_set_zero_quotas_after_unlimited(backups): + quotas = AutobackupQuotas( + last=-1, + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, + ) + 
Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == unlimited_quotas + + quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == zero_quotas + + +def test_autobackup_snapshots_pruning(backups): + # Wednesday, fourth week + now = datetime(year=2023, month=1, day=25, hour=10) + + snaps = [ + dummy_snapshot(now), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(hours=5)), + dummy_snapshot(now - timedelta(days=1)), + dummy_snapshot(now - timedelta(days=1, hours=2)), + dummy_snapshot(now - timedelta(days=1, hours=3)), + dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=7)), + dummy_snapshot(now - timedelta(days=12)), + dummy_snapshot(now - timedelta(days=23)), + dummy_snapshot(now - timedelta(days=28)), + dummy_snapshot(now - timedelta(days=32)), + dummy_snapshot(now - timedelta(days=47)), + dummy_snapshot(now - timedelta(days=64)), + dummy_snapshot(now - timedelta(days=84)), + dummy_snapshot(now - timedelta(days=104)), + dummy_snapshot(now - timedelta(days=365 * 2)), + ] + old_len = len(snaps) + + quotas = copy(unlimited_quotas) + Backups.set_autobackup_quotas(quotas) + assert Backups._prune_snaps_with_quotas(snaps) == snaps + + quotas = copy(zero_quotas) + quotas.last = 2 + quotas.daily = 2 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(now), + dummy_snapshot(now - timedelta(minutes=5)), + # dummy_snapshot(now - timedelta(hours=2)), + # dummy_snapshot(now - timedelta(hours=5)), + dummy_snapshot(now - timedelta(days=1)), + # dummy_snapshot(now - timedelta(days=1, hours=2)), + # dummy_snapshot(now - timedelta(days=1, hours=3)), + # dummy_snapshot(now - timedelta(days=2)), + # dummy_snapshot(now - timedelta(days=7)), + # 
dummy_snapshot(now - timedelta(days=12)), + # dummy_snapshot(now - timedelta(days=23)), + # dummy_snapshot(now - timedelta(days=28)), + # dummy_snapshot(now - timedelta(days=32)), + # dummy_snapshot(now - timedelta(days=47)), + # dummy_snapshot(now - timedelta(days=64)), + # dummy_snapshot(now - timedelta(days=84)), + # dummy_snapshot(now - timedelta(days=104)), + # dummy_snapshot(now - timedelta(days=365 * 2)), + ] + + # checking that this function does not mutate the argument + assert snaps != snaps_to_keep + assert len(snaps) == old_len + + quotas = copy(zero_quotas) + quotas.weekly = 4 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(now), + # dummy_snapshot(now - timedelta(minutes=5)), + # dummy_snapshot(now - timedelta(hours=2)), + # dummy_snapshot(now - timedelta(hours=5)), + # dummy_snapshot(now - timedelta(days=1)), + # dummy_snapshot(now - timedelta(days=1, hours=2)), + # dummy_snapshot(now - timedelta(days=1, hours=3)), + # dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=7)), + dummy_snapshot(now - timedelta(days=12)), + dummy_snapshot(now - timedelta(days=23)), + # dummy_snapshot(now - timedelta(days=28)), + # dummy_snapshot(now - timedelta(days=32)), + # dummy_snapshot(now - timedelta(days=47)), + # dummy_snapshot(now - timedelta(days=64)), + # dummy_snapshot(now - timedelta(days=84)), + # dummy_snapshot(now - timedelta(days=104)), + # dummy_snapshot(now - timedelta(days=365 * 2)), + ] + + quotas = copy(zero_quotas) + quotas.monthly = 7 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(now), + # dummy_snapshot(now - timedelta(minutes=5)), + # dummy_snapshot(now - timedelta(hours=2)), + # dummy_snapshot(now - timedelta(hours=5)), + # dummy_snapshot(now - timedelta(days=1)), + # dummy_snapshot(now - timedelta(days=1, hours=2)), + # 
dummy_snapshot(now - timedelta(days=1, hours=3)), + # dummy_snapshot(now - timedelta(days=2)), + # dummy_snapshot(now - timedelta(days=7)), + # dummy_snapshot(now - timedelta(days=12)), + # dummy_snapshot(now - timedelta(days=23)), + dummy_snapshot(now - timedelta(days=28)), + # dummy_snapshot(now - timedelta(days=32)), + # dummy_snapshot(now - timedelta(days=47)), + dummy_snapshot(now - timedelta(days=64)), + # dummy_snapshot(now - timedelta(days=84)), + dummy_snapshot(now - timedelta(days=104)), + dummy_snapshot(now - timedelta(days=365 * 2)), + ] + + +def test_autobackup_snapshots_pruning_yearly(backups): + snaps = [ + dummy_snapshot(datetime(year=2055, month=3, day=1)), + dummy_snapshot(datetime(year=2055, month=2, day=1)), + dummy_snapshot(datetime(year=2023, month=4, day=1)), + dummy_snapshot(datetime(year=2023, month=3, day=1)), + dummy_snapshot(datetime(year=2023, month=2, day=1)), + dummy_snapshot(datetime(year=2021, month=2, day=1)), + ] + quotas = copy(zero_quotas) + quotas.yearly = 2 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(datetime(year=2055, month=3, day=1)), + dummy_snapshot(datetime(year=2023, month=4, day=1)), + ] + + +def test_autobackup_snapshots_pruning_bottleneck(backups): + now = datetime(year=2023, month=1, day=25, hour=10) + snaps = [ + dummy_snapshot(now), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(hours=3)), + dummy_snapshot(now - timedelta(hours=4)), + ] + + yearly_quota = copy(zero_quotas) + yearly_quota.yearly = 2 + + monthly_quota = copy(zero_quotas) + monthly_quota.monthly = 2 + + weekly_quota = copy(zero_quotas) + weekly_quota.weekly = 2 + + daily_quota = copy(zero_quotas) + daily_quota.daily = 2 + + last_quota = copy(zero_quotas) + last_quota.last = 1 + last_quota.yearly = 2 + + for quota in [last_quota, yearly_quota, monthly_quota, weekly_quota, 
daily_quota]: + print(quota) + Backups.set_autobackup_quotas(quota) + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(now), + # If there is a vacant quota, we should keep the last snapshot even if it doesn't fit + dummy_snapshot(now - timedelta(hours=4)), + ] + + +def test_autobackup_snapshots_pruning_edgeweek(backups): + # jan 1 2023 is Sunday + snaps = [ + dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2023, month=1, day=1)), + dummy_snapshot(datetime(year=2022, month=12, day=31)), + dummy_snapshot(datetime(year=2022, month=12, day=30)), + ] + quotas = copy(zero_quotas) + quotas.weekly = 2 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2023, month=1, day=1)), + ] + + +def test_autobackup_snapshots_pruning_big_gap(backups): + snaps = [ + dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2023, month=1, day=2)), + dummy_snapshot(datetime(year=2022, month=10, day=31)), + dummy_snapshot(datetime(year=2022, month=10, day=30)), + ] + quotas = copy(zero_quotas) + quotas.weekly = 2 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2022, month=10, day=31)), + ] + + +def test_quotas_exceeded_with_too_many_autobackups(backups, dummy_service): + assert Backups.autobackup_quotas() + quota = copy(zero_quotas) + quota.last = 2 + Backups.set_autobackup_quotas(quota) + assert Backups.autobackup_quotas().last == 2 + + snap = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 1 + snap2 = Backups.back_up(dummy_service, BackupReason.AUTO) + assert 
len(Backups.get_snapshots(dummy_service)) == 2 + snap3 = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 2 + + snaps = Backups.get_snapshots(dummy_service) + assert snap2 in snaps + assert snap3 in snaps + assert snap not in snaps + + quota.last = -1 + Backups.set_autobackup_quotas(quota) + snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) + + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 3 + assert snap4 in snaps + + # Retroactivity + quota.last = 1 + Backups.set_autobackup_quotas(quota) + job = Jobs.add("trimming", "test.autobackup_trimming", "trimming the snaps!") + handle = prune_autobackup_snapshots(job) + handle(blocking=True) + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + + snap5 = Backups.back_up(dummy_service, BackupReason.AUTO) + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + assert snap5 in snaps + + # Explicit snaps are not affected + snap6 = Backups.back_up(dummy_service, BackupReason.EXPLICIT) + + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 2 + assert snap5 in snaps + assert snap6 in snaps diff --git a/tests/test_backup.py b/tests/test_backup.py index 646d9aa..f343feb 100644 --- a/tests/test_backup.py +++ b/tests/test_backup.py @@ -7,25 +7,17 @@ from os import listdir from os import urandom from datetime import datetime, timedelta, timezone -from copy import copy import tempfile from selfprivacy_api.utils.huey import huey -import tempfile -from selfprivacy_api.utils.huey import huey - -from selfprivacy_api.services import Service, get_all_services -from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services.service import ServiceStatus -from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.graphql.common_types.backup import ( RestoreStrategy, BackupReason, - 
AutobackupQuotas, ) from selfprivacy_api.jobs import Jobs, JobStatus @@ -43,7 +35,6 @@ from selfprivacy_api.backup.tasks import ( start_backup, restore_snapshot, reload_snapshot_cache, - prune_autobackup_snapshots, ) from selfprivacy_api.backup.storage import Storage @@ -218,16 +209,6 @@ def test_reinit_after_purge(backups): assert len(Backups.get_all_snapshots()) == 0 -def test_backup_simple_file(raw_dummy_service, file_backup): - # temporarily incomplete - service = raw_dummy_service - assert service is not None - assert file_backup is not None - - name = service.get_id() - file_backup.backupper.init() - - def test_backup_service(dummy_service, backups): id = dummy_service.get_id() assert_job_finished(f"services.{id}.backup", count=0) @@ -281,360 +262,6 @@ def test_backup_reasons(backups, dummy_service): assert snaps[0].reason == BackupReason.AUTO -unlimited_quotas = AutobackupQuotas( - last=-1, - daily=-1, - weekly=-1, - monthly=-1, - yearly=-1, -) - -zero_quotas = AutobackupQuotas( - last=0, - daily=0, - weekly=0, - monthly=0, - yearly=0, -) - - -def test_get_empty_quotas(backups): - quotas = Backups.autobackup_quotas() - assert quotas is not None - assert quotas == unlimited_quotas - - -def test_set_quotas(backups): - quotas = AutobackupQuotas( - last=3, - daily=2343, - weekly=343, - monthly=0, - yearly=-34556, - ) - Backups.set_autobackup_quotas(quotas) - assert Backups.autobackup_quotas() == AutobackupQuotas( - last=3, - daily=2343, - weekly=343, - monthly=0, - yearly=-1, - ) - - -def test_set_zero_quotas(backups): - quotas = AutobackupQuotas( - last=0, - daily=0, - weekly=0, - monthly=0, - yearly=0, - ) - Backups.set_autobackup_quotas(quotas) - assert Backups.autobackup_quotas() == zero_quotas - - -def test_set_unlimited_quotas(backups): - quotas = AutobackupQuotas( - last=-1, - daily=-1, - weekly=-1, - monthly=-1, - yearly=-1, - ) - Backups.set_autobackup_quotas(quotas) - assert Backups.autobackup_quotas() == unlimited_quotas - - -def 
test_set_zero_quotas_after_unlimited(backups): - quotas = AutobackupQuotas( - last=-1, - daily=-1, - weekly=-1, - monthly=-1, - yearly=-1, - ) - Backups.set_autobackup_quotas(quotas) - assert Backups.autobackup_quotas() == unlimited_quotas - - quotas = AutobackupQuotas( - last=0, - daily=0, - weekly=0, - monthly=0, - yearly=0, - ) - Backups.set_autobackup_quotas(quotas) - assert Backups.autobackup_quotas() == zero_quotas - - -def dummy_snapshot(date: datetime): - return Snapshot( - id=str(hash(date)), - service_name="someservice", - created_at=date, - reason=BackupReason.EXPLICIT, - ) - - -def test_autobackup_snapshots_pruning(backups): - # Wednesday, fourth week - now = datetime(year=2023, month=1, day=25, hour=10) - - snaps = [ - dummy_snapshot(now), - dummy_snapshot(now - timedelta(minutes=5)), - dummy_snapshot(now - timedelta(hours=2)), - dummy_snapshot(now - timedelta(hours=5)), - dummy_snapshot(now - timedelta(days=1)), - dummy_snapshot(now - timedelta(days=1, hours=2)), - dummy_snapshot(now - timedelta(days=1, hours=3)), - dummy_snapshot(now - timedelta(days=2)), - dummy_snapshot(now - timedelta(days=7)), - dummy_snapshot(now - timedelta(days=12)), - dummy_snapshot(now - timedelta(days=23)), - dummy_snapshot(now - timedelta(days=28)), - dummy_snapshot(now - timedelta(days=32)), - dummy_snapshot(now - timedelta(days=47)), - dummy_snapshot(now - timedelta(days=64)), - dummy_snapshot(now - timedelta(days=84)), - dummy_snapshot(now - timedelta(days=104)), - dummy_snapshot(now - timedelta(days=365 * 2)), - ] - old_len = len(snaps) - - quotas = copy(unlimited_quotas) - Backups.set_autobackup_quotas(quotas) - assert Backups._prune_snaps_with_quotas(snaps) == snaps - - quotas = copy(zero_quotas) - quotas.last = 2 - quotas.daily = 2 - Backups.set_autobackup_quotas(quotas) - - snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) - assert snaps_to_keep == [ - dummy_snapshot(now), - dummy_snapshot(now - timedelta(minutes=5)), - # dummy_snapshot(now - 
timedelta(hours=2)), - # dummy_snapshot(now - timedelta(hours=5)), - dummy_snapshot(now - timedelta(days=1)), - # dummy_snapshot(now - timedelta(days=1, hours=2)), - # dummy_snapshot(now - timedelta(days=1, hours=3)), - # dummy_snapshot(now - timedelta(days=2)), - # dummy_snapshot(now - timedelta(days=7)), - # dummy_snapshot(now - timedelta(days=12)), - # dummy_snapshot(now - timedelta(days=23)), - # dummy_snapshot(now - timedelta(days=28)), - # dummy_snapshot(now - timedelta(days=32)), - # dummy_snapshot(now - timedelta(days=47)), - # dummy_snapshot(now - timedelta(days=64)), - # dummy_snapshot(now - timedelta(days=84)), - # dummy_snapshot(now - timedelta(days=104)), - # dummy_snapshot(now - timedelta(days=365 * 2)), - ] - - # checking that this function does not mutate the argument - assert snaps != snaps_to_keep - assert len(snaps) == old_len - - quotas = copy(zero_quotas) - quotas.weekly = 4 - Backups.set_autobackup_quotas(quotas) - - snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) - assert snaps_to_keep == [ - dummy_snapshot(now), - # dummy_snapshot(now - timedelta(minutes=5)), - # dummy_snapshot(now - timedelta(hours=2)), - # dummy_snapshot(now - timedelta(hours=5)), - # dummy_snapshot(now - timedelta(days=1)), - # dummy_snapshot(now - timedelta(days=1, hours=2)), - # dummy_snapshot(now - timedelta(days=1, hours=3)), - # dummy_snapshot(now - timedelta(days=2)), - dummy_snapshot(now - timedelta(days=7)), - dummy_snapshot(now - timedelta(days=12)), - dummy_snapshot(now - timedelta(days=23)), - # dummy_snapshot(now - timedelta(days=28)), - # dummy_snapshot(now - timedelta(days=32)), - # dummy_snapshot(now - timedelta(days=47)), - # dummy_snapshot(now - timedelta(days=64)), - # dummy_snapshot(now - timedelta(days=84)), - # dummy_snapshot(now - timedelta(days=104)), - # dummy_snapshot(now - timedelta(days=365 * 2)), - ] - - quotas = copy(zero_quotas) - quotas.monthly = 7 - Backups.set_autobackup_quotas(quotas) - - snaps_to_keep = 
Backups._prune_snaps_with_quotas(snaps) - assert snaps_to_keep == [ - dummy_snapshot(now), - # dummy_snapshot(now - timedelta(minutes=5)), - # dummy_snapshot(now - timedelta(hours=2)), - # dummy_snapshot(now - timedelta(hours=5)), - # dummy_snapshot(now - timedelta(days=1)), - # dummy_snapshot(now - timedelta(days=1, hours=2)), - # dummy_snapshot(now - timedelta(days=1, hours=3)), - # dummy_snapshot(now - timedelta(days=2)), - # dummy_snapshot(now - timedelta(days=7)), - # dummy_snapshot(now - timedelta(days=12)), - # dummy_snapshot(now - timedelta(days=23)), - dummy_snapshot(now - timedelta(days=28)), - # dummy_snapshot(now - timedelta(days=32)), - # dummy_snapshot(now - timedelta(days=47)), - dummy_snapshot(now - timedelta(days=64)), - # dummy_snapshot(now - timedelta(days=84)), - dummy_snapshot(now - timedelta(days=104)), - dummy_snapshot(now - timedelta(days=365 * 2)), - ] - - -def test_autobackup_snapshots_pruning_yearly(backups): - snaps = [ - dummy_snapshot(datetime(year=2055, month=3, day=1)), - dummy_snapshot(datetime(year=2055, month=2, day=1)), - dummy_snapshot(datetime(year=2023, month=4, day=1)), - dummy_snapshot(datetime(year=2023, month=3, day=1)), - dummy_snapshot(datetime(year=2023, month=2, day=1)), - dummy_snapshot(datetime(year=2021, month=2, day=1)), - ] - quotas = copy(zero_quotas) - quotas.yearly = 2 - Backups.set_autobackup_quotas(quotas) - - snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) - assert snaps_to_keep == [ - dummy_snapshot(datetime(year=2055, month=3, day=1)), - dummy_snapshot(datetime(year=2023, month=4, day=1)), - ] - - -def test_autobackup_snapshots_pruning_bottleneck(backups): - now = datetime(year=2023, month=1, day=25, hour=10) - snaps = [ - dummy_snapshot(now), - dummy_snapshot(now - timedelta(minutes=5)), - dummy_snapshot(now - timedelta(hours=2)), - dummy_snapshot(now - timedelta(hours=3)), - dummy_snapshot(now - timedelta(hours=4)), - ] - - yearly_quota = copy(zero_quotas) - yearly_quota.yearly = 2 - - 
monthly_quota = copy(zero_quotas) - monthly_quota.monthly = 2 - - weekly_quota = copy(zero_quotas) - weekly_quota.weekly = 2 - - daily_quota = copy(zero_quotas) - daily_quota.daily = 2 - - last_quota = copy(zero_quotas) - last_quota.last = 1 - last_quota.yearly = 2 - - for quota in [last_quota, yearly_quota, monthly_quota, weekly_quota, daily_quota]: - print(quota) - Backups.set_autobackup_quotas(quota) - snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) - assert snaps_to_keep == [ - dummy_snapshot(now), - # If there is a vacant quota, we should keep the last snapshot even if it doesn't fit - dummy_snapshot(now - timedelta(hours=4)), - ] - - -def test_autobackup_snapshots_pruning_edgeweek(backups): - # jan 1 2023 is Sunday - snaps = [ - dummy_snapshot(datetime(year=2023, month=1, day=6)), - dummy_snapshot(datetime(year=2023, month=1, day=1)), - dummy_snapshot(datetime(year=2022, month=12, day=31)), - dummy_snapshot(datetime(year=2022, month=12, day=30)), - ] - quotas = copy(zero_quotas) - quotas.weekly = 2 - Backups.set_autobackup_quotas(quotas) - - snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) - assert snaps_to_keep == [ - dummy_snapshot(datetime(year=2023, month=1, day=6)), - dummy_snapshot(datetime(year=2023, month=1, day=1)), - ] - - -def test_autobackup_snapshots_pruning_big_gap(backups): - snaps = [ - dummy_snapshot(datetime(year=2023, month=1, day=6)), - dummy_snapshot(datetime(year=2023, month=1, day=2)), - dummy_snapshot(datetime(year=2022, month=10, day=31)), - dummy_snapshot(datetime(year=2022, month=10, day=30)), - ] - quotas = copy(zero_quotas) - quotas.weekly = 2 - Backups.set_autobackup_quotas(quotas) - - snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) - assert snaps_to_keep == [ - dummy_snapshot(datetime(year=2023, month=1, day=6)), - dummy_snapshot(datetime(year=2022, month=10, day=31)), - ] - - -def test_too_many_auto(backups, dummy_service): - assert Backups.autobackup_quotas() - quota = copy(zero_quotas) - quota.last = 2 
- Backups.set_autobackup_quotas(quota) - assert Backups.autobackup_quotas().last == 2 - - snap = Backups.back_up(dummy_service, BackupReason.AUTO) - assert len(Backups.get_snapshots(dummy_service)) == 1 - snap2 = Backups.back_up(dummy_service, BackupReason.AUTO) - assert len(Backups.get_snapshots(dummy_service)) == 2 - snap3 = Backups.back_up(dummy_service, BackupReason.AUTO) - assert len(Backups.get_snapshots(dummy_service)) == 2 - - snaps = Backups.get_snapshots(dummy_service) - assert snap2 in snaps - assert snap3 in snaps - assert snap not in snaps - - quota.last = -1 - Backups.set_autobackup_quotas(quota) - snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) - - snaps = Backups.get_snapshots(dummy_service) - assert len(snaps) == 3 - assert snap4 in snaps - - # Retroactivity - quota.last = 1 - Backups.set_autobackup_quotas(quota) - job = Jobs.add("trimming", "test.autobackup_trimming", "trimming the snaps!") - handle = prune_autobackup_snapshots(job) - handle(blocking=True) - snaps = Backups.get_snapshots(dummy_service) - assert len(snaps) == 1 - - snap5 = Backups.back_up(dummy_service, BackupReason.AUTO) - snaps = Backups.get_snapshots(dummy_service) - assert len(snaps) == 1 - assert snap5 in snaps - - # Explicit snaps are not affected - snap6 = Backups.back_up(dummy_service, BackupReason.EXPLICIT) - - snaps = Backups.get_snapshots(dummy_service) - assert len(snaps) == 2 - assert snap5 in snaps - assert snap6 in snaps - - def folder_files(folder): return [ path.join(folder, filename) @@ -857,127 +484,6 @@ def test_restore_snapshot_task( assert len(snaps) == 1 -def test_set_autobackup_period(backups): - assert Backups.autobackup_period_minutes() is None - - Backups.set_autobackup_period_minutes(2) - assert Backups.autobackup_period_minutes() == 2 - - Backups.disable_all_autobackup() - assert Backups.autobackup_period_minutes() is None - - Backups.set_autobackup_period_minutes(3) - assert Backups.autobackup_period_minutes() == 3 - - 
Backups.set_autobackup_period_minutes(0) - assert Backups.autobackup_period_minutes() is None - - Backups.set_autobackup_period_minutes(3) - assert Backups.autobackup_period_minutes() == 3 - - Backups.set_autobackup_period_minutes(-1) - assert Backups.autobackup_period_minutes() is None - - -def test_no_default_autobackup(backups, dummy_service): - now = datetime.now(timezone.utc) - assert not Backups.is_time_to_backup_service(dummy_service, now) - assert not Backups.is_time_to_backup(now) - - -def backuppable_services() -> list[Service]: - return [service for service in get_all_services() if service.can_be_backed_up()] - - -def test_services_to_autobackup(backups, dummy_service): - backup_period = 13 # minutes - now = datetime.now(timezone.utc) - - dummy_service.set_backuppable(False) - services = Backups.services_to_back_up(now) - assert len(services) == 0 - - dummy_service.set_backuppable(True) - - services = Backups.services_to_back_up(now) - assert len(services) == 0 - - Backups.set_autobackup_period_minutes(backup_period) - - services = Backups.services_to_back_up(now) - assert len(services) == len(backuppable_services()) - assert dummy_service.get_id() in [ - service.get_id() for service in backuppable_services() - ] - - -def test_do_not_autobackup_disabled_services(backups, dummy_service): - now = datetime.now(timezone.utc) - Backups.set_autobackup_period_minutes(3) - assert Backups.is_time_to_backup_service(dummy_service, now) is True - - dummy_service.disable() - assert Backups.is_time_to_backup_service(dummy_service, now) is False - - -def test_autobackup_timer_periods(backups, dummy_service): - now = datetime.now(timezone.utc) - backup_period = 13 # minutes - - assert not Backups.is_time_to_backup_service(dummy_service, now) - assert not Backups.is_time_to_backup(now) - - Backups.set_autobackup_period_minutes(backup_period) - assert Backups.is_time_to_backup_service(dummy_service, now) - assert Backups.is_time_to_backup(now) - - 
Backups.set_autobackup_period_minutes(0) - assert not Backups.is_time_to_backup_service(dummy_service, now) - assert not Backups.is_time_to_backup(now) - - -def test_autobackup_timer_enabling(backups, dummy_service): - now = datetime.now(timezone.utc) - backup_period = 13 # minutes - dummy_service.set_backuppable(False) - - Backups.set_autobackup_period_minutes(backup_period) - assert Backups.is_time_to_backup( - now - ) # there are other services too, not just our dummy - - # not backuppable service is not backuppable even if period is set - assert not Backups.is_time_to_backup_service(dummy_service, now) - - dummy_service.set_backuppable(True) - assert dummy_service.can_be_backed_up() - assert Backups.is_time_to_backup_service(dummy_service, now) - - Backups.disable_all_autobackup() - assert not Backups.is_time_to_backup_service(dummy_service, now) - assert not Backups.is_time_to_backup(now) - - -def test_autobackup_timing(backups, dummy_service): - backup_period = 13 # minutes - now = datetime.now(timezone.utc) - - Backups.set_autobackup_period_minutes(backup_period) - assert Backups.is_time_to_backup_service(dummy_service, now) - assert Backups.is_time_to_backup(now) - - Backups.back_up(dummy_service) - - now = datetime.now(timezone.utc) - assert not Backups.is_time_to_backup_service(dummy_service, now) - - past = datetime.now(timezone.utc) - timedelta(minutes=1) - assert not Backups.is_time_to_backup_service(dummy_service, past) - - future = datetime.now(timezone.utc) + timedelta(minutes=backup_period + 2) - assert Backups.is_time_to_backup_service(dummy_service, future) - - def test_backup_unbackuppable(backups, dummy_service): dummy_service.set_backuppable(False) assert dummy_service.can_be_backed_up() is False @@ -985,25 +491,6 @@ def test_backup_unbackuppable(backups, dummy_service): Backups.back_up(dummy_service) -def test_failed_autoback_prevents_more_autobackup(backups, dummy_service): - backup_period = 13 # minutes - now = datetime.now(timezone.utc) - 
- Backups.set_autobackup_period_minutes(backup_period) - assert Backups.is_time_to_backup_service(dummy_service, now) - - # artificially making an errored out backup job - dummy_service.set_backuppable(False) - with pytest.raises(ValueError): - Backups.back_up(dummy_service) - dummy_service.set_backuppable(True) - - assert Backups.get_last_backed_up(dummy_service) is None - assert Backups.get_last_backup_error_time(dummy_service) is not None - - assert Backups.is_time_to_backup_service(dummy_service, now) is False - - # Storage def test_snapshots_caching(backups, dummy_service): Backups.back_up(dummy_service)