Mirror of https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api.git
Merge pull request 'feature(backups): remove all json logic' (#86) from do-not-load-backup-provider-from-json into master
Reviewed-on: https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api/pulls/86
Reviewed-by: Inex Code <inex.code@selfprivacy.org>
Commit 9e8326bbcf
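
This PR removes the last code paths that read the backup provider from the userdata JSON file (the "backblaze"/"backup" sections) or wrote defaults back to it; Redis becomes the only place the provider configuration is persisted. As orientation, the intended flow after the merge looks roughly like the sketch below (illustrative only: the names are taken from the hunks that follow, while the import paths and call order are assumptions, not the project's documented API):

    # Orientation sketch, not code from this PR; import paths are assumptions.
    from selfprivacy_api.backup import Backups                             # path assumed
    from selfprivacy_api.backup.storage import Storage                     # path assumed
    from selfprivacy_api.graphql.queries.providers import BackupProvider   # path assumed

    # A provider object is constructed in memory...
    provider = Backups._construct_provider(
        kind=BackupProvider.BACKBLAZE, login="ID", key="KEY", location="bucket"
    )
    # ...persisted to Redis only...
    Storage.store_provider(provider)
    # ...and looked up from Redis on the next access; the userdata JSON file
    # is no longer consulted or reset.
    assert Backups._load_provider_redis() is not None
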
@@ -7,8 +7,6 @@ import os
 from os import statvfs
 from typing import Callable, List, Optional
 
-from selfprivacy_api.utils import ReadUserData, WriteUserData
-
 from selfprivacy_api.services import (
     get_service_by_id,
     get_all_services,
@@ -44,12 +42,6 @@ from selfprivacy_api.backup.jobs import (
     add_restore_job,
 )
 
-DEFAULT_JSON_PROVIDER = {
-    "provider": "BACKBLAZE",
-    "accountId": "",
-    "accountKey": "",
-    "bucket": "",
-}
 
 BACKUP_PROVIDER_ENVS = {
     "kind": "BACKUP_KIND",
@@ -134,17 +126,11 @@ class Backups:
         Storage.store_provider(provider)
 
     @staticmethod
-    def reset(reset_json=True) -> None:
+    def reset() -> None:
         """
         Deletes all the data about the backup storage provider.
         """
         Storage.reset()
-        if reset_json:
-            try:
-                Backups._reset_provider_json()
-            except FileNotFoundError:
-                # if there is no userdata file, we do not need to reset it
-                pass
 
     @staticmethod
     def _lookup_provider() -> AbstractBackupProvider:
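
After this hunk, `Backups.reset()` takes no arguments and only clears the Redis-backed state; it never touches the userdata file. A caller-side sketch of the difference (the post-reset assertions are inferred from the tests touched later in this PR, not from documented behaviour):

    # Caller-side sketch; the import path and the post-reset assertions are
    # assumptions based on the tests in this PR.
    from selfprivacy_api.backup import Backups  # path assumed

    Backups.reset()                # was: Backups.reset(reset_json=False) or reset_json=True
    provider = Backups.provider()  # provider lookup now goes through Redis only
    assert provider is not None
    assert provider.login == ""    # with nothing stored, an empty NONE provider is returned
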
@@ -152,15 +138,6 @@
         if redis_provider is not None:
             return redis_provider
 
-        try:
-            json_provider = Backups._load_provider_json()
-        except FileNotFoundError:
-            json_provider = None
-
-        if json_provider is not None:
-            Storage.store_provider(json_provider)
-            return json_provider
-
         none_provider = Backups._construct_provider(
             BackupProviderEnum.NONE, login="", key="", location=""
         )
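
With the JSON fallback gone, the provider lookup goes straight from Redis to a NONE provider. A reconstruction of the resulting shape of `_lookup_provider()` is sketched below; only the context lines visible in the hunk above are certain, while the framing, imports, and the trailing store/return are assumptions (the environment-based path hinted at by `BACKUP_PROVIDER_ENVS` is handled elsewhere and is not shown here):

    # Illustrative reconstruction, not code from the PR; paths and framing assumed.
    from selfprivacy_api.backup import Backups                            # path assumed
    from selfprivacy_api.backup.storage import Storage                    # path assumed
    from selfprivacy_api.backup.providers import AbstractBackupProvider
    from selfprivacy_api.graphql.queries.providers import (               # path assumed
        BackupProvider as BackupProviderEnum,
    )


    def lookup_provider_sketch() -> AbstractBackupProvider:
        redis_provider = Backups._load_provider_redis()
        if redis_provider is not None:
            return redis_provider

        # The userdata-JSON fallback that used to sit here has been removed;
        # the lookup now falls straight through to a NONE provider.
        none_provider = Backups._construct_provider(
            BackupProviderEnum.NONE, login="", key="", location=""
        )
        Storage.store_provider(none_provider)  # assumed follow-up, not shown in the hunk
        return none_provider
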
@@ -215,44 +192,6 @@
             provider_model.repo_id,
         )
 
-    @staticmethod
-    def _load_provider_json() -> Optional[AbstractBackupProvider]:
-        with ReadUserData() as user_data:
-            provider_dict = {
-                "provider": "",
-                "accountId": "",
-                "accountKey": "",
-                "bucket": "",
-            }
-
-            if "backup" not in user_data.keys():
-                if "backblaze" in user_data.keys():
-                    provider_dict.update(user_data["backblaze"])
-                    provider_dict["provider"] = "BACKBLAZE"
-                return None
-            else:
-                provider_dict.update(user_data["backup"])
-
-            if provider_dict == DEFAULT_JSON_PROVIDER:
-                return None
-            try:
-                return Backups._construct_provider(
-                    kind=BackupProviderEnum[provider_dict["provider"]],
-                    login=provider_dict["accountId"],
-                    key=provider_dict["accountKey"],
-                    location=provider_dict["bucket"],
-                )
-            except KeyError:
-                return None
-
-    @staticmethod
-    def _reset_provider_json() -> None:
-        with WriteUserData() as user_data:
-            if "backblaze" in user_data.keys():
-                del user_data["backblaze"]
-
-            user_data["backup"] = DEFAULT_JSON_PROVIDER
-
     # Init
 
     @staticmethod
@@ -138,18 +138,17 @@ class Storage:
 
     @staticmethod
     def store_provider(provider: AbstractBackupProvider) -> None:
-        """Stores backup stroage provider auth data in redis"""
-        store_model_as_hash(
-            redis,
-            REDIS_PROVIDER_KEY,
-            BackupProviderModel(
-                kind=get_kind(provider),
-                login=provider.login,
-                key=provider.key,
-                location=provider.location,
-                repo_id=provider.repo_id,
-            ),
-        )
+        """Stores backup provider auth data in redis"""
+        model = BackupProviderModel(
+            kind=get_kind(provider),
+            login=provider.login,
+            key=provider.key,
+            location=provider.location,
+            repo_id=provider.repo_id,
+        )
+        store_model_as_hash(redis, REDIS_PROVIDER_KEY, model)
+        if Storage.load_provider() != model:
+            raise IOError("could not store the provider model: ", model.dict)
 
     @staticmethod
     def load_provider() -> Optional[BackupProviderModel]:
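
`Storage.store_provider()` now verifies its own write: the `BackupProviderModel` is written to a Redis hash and immediately read back through `load_provider()`, and a mismatch raises `IOError` instead of passing silently. Reading the stored model back is sketched below (field names come from the model construction in the hunk above; the import path is an assumption):

    # Read-back sketch; import path assumed.
    from selfprivacy_api.backup.storage import Storage  # path assumed

    model = Storage.load_provider()
    if model is None:
        print("no provider stored in Redis yet")
    else:
        # kind/login/key/location/repo_id mirror what store_provider() wrote
        print(model.kind, model.login, model.location, model.repo_id)
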
@@ -77,11 +77,5 @@
         "rootKeys": [
             "ssh-ed25519 KEY test@pc"
         ]
-    },
-    "backup": {
-        "provider": "BACKBLAZE",
-        "accountId": "ID",
-        "accountKey": "KEY",
-        "bucket": "selfprivacy"
     }
 }
@@ -29,6 +29,7 @@ import selfprivacy_api.backup.providers as providers
 from selfprivacy_api.backup.providers import AbstractBackupProvider
 from selfprivacy_api.backup.providers.backblaze import Backblaze
 from selfprivacy_api.backup.providers.none import NoBackups
+from selfprivacy_api.backup.providers import get_kind
 from selfprivacy_api.backup.util import sync
 
 from selfprivacy_api.backup.tasks import (
@@ -82,11 +83,6 @@ def backups(tmpdir):
     Backups.erase_repo()
 
 
-@pytest.fixture()
-def backups_backblaze(generic_userdata):
-    Backups.reset(reset_json=False)
-
-
 @pytest.fixture()
 def memory_backup() -> AbstractBackupProvider:
     ProviderClass = providers.get_provider(BackupProvider.MEMORY)
@@ -106,20 +102,6 @@ def file_backup(tmpdir) -> AbstractBackupProvider:
     return provider
 
 
-def test_config_load(generic_userdata):
-    Backups.reset(reset_json=False)
-    provider = Backups.provider()
-
-    assert provider is not None
-    assert isinstance(provider, Backblaze)
-    assert provider.login == "ID"
-    assert provider.key == "KEY"
-    assert provider.location == "selfprivacy"
-
-    assert provider.backupper.account == "ID"
-    assert provider.backupper.key == "KEY"
-
-
 def test_reset_sets_to_none1():
     Backups.reset()
     provider = Backups.provider()
@@ -167,25 +149,6 @@ def test_setting_from_envs(tmpdir):
         del os.environ[key]
 
 
-def test_json_reset(generic_userdata):
-    Backups.reset(reset_json=False)
-    provider = Backups.provider()
-    assert provider is not None
-    assert isinstance(provider, Backblaze)
-    assert provider.login == "ID"
-    assert provider.key == "KEY"
-    assert provider.location == "selfprivacy"
-
-    Backups.reset()
-    provider = Backups.provider()
-    assert provider is not None
-    assert isinstance(provider, AbstractBackupProvider)
-    assert provider.login == ""
-    assert provider.key == ""
-    assert provider.location == ""
-    assert provider.repo_id == ""
-
-
 def test_select_backend():
     provider = providers.get_provider(BackupProvider.BACKBLAZE)
     assert provider is not None
@@ -570,20 +533,45 @@ def test_init_tracking_caching2(backups, tmpdir):
 
 
 # Storage
-def test_provider_storage(backups_backblaze):
-    provider = Backups.provider()
+def test_provider_storage(backups):
+    test_login = "ID"
+    test_key = "KEY"
+    test_location = "selprivacy_bin"
 
-    assert provider is not None
-    assert isinstance(provider, Backblaze)
-    assert provider.login == "ID"
-    assert provider.key == "KEY"
+    old_provider = Backups.provider()
+    assert old_provider is not None
 
-    Storage.store_provider(provider)
+    assert not isinstance(old_provider, Backblaze)
+    assert old_provider.login != test_login
+    assert old_provider.key != test_key
+    assert old_provider.location != test_location
+
+    test_provider = Backups._construct_provider(
+        kind=BackupProvider.BACKBLAZE, login="ID", key=test_key, location=test_location
+    )
+
+    assert isinstance(test_provider, Backblaze)
+    assert get_kind(test_provider) == "BACKBLAZE"
+    assert test_provider.login == test_login
+    assert test_provider.key == test_key
+    assert test_provider.location == test_location
+
+    Storage.store_provider(test_provider)
+
+    restored_provider_model = Storage.load_provider()
+    assert restored_provider_model.kind == "BACKBLAZE"
+    assert restored_provider_model.login == test_login
+    assert restored_provider_model.key == test_key
+    assert restored_provider_model.location == test_location
 
     restored_provider = Backups._load_provider_redis()
     assert isinstance(restored_provider, Backblaze)
-    assert restored_provider.login == "ID"
-    assert restored_provider.key == "KEY"
+    assert restored_provider.login == test_login
+    assert restored_provider.key == test_key
+    assert restored_provider.location == test_location
+
+    # Revert our mess so we can teardown ok
+    Storage.store_provider(old_provider)
 
 
 def test_sync(dummy_service):