2023-02-03 17:04:35 +00:00
|
|
|
import pytest
|
2023-07-26 11:54:17 +00:00
|
|
|
import os
|
2023-02-03 19:09:24 +00:00
|
|
|
import os.path as path
|
2023-02-20 16:09:01 +00:00
|
|
|
from os import makedirs
|
2023-02-22 15:58:36 +00:00
|
|
|
from os import remove
|
|
|
|
from os import listdir
|
2023-06-28 11:45:07 +00:00
|
|
|
from os import urandom
|
2023-04-07 15:18:54 +00:00
|
|
|
from datetime import datetime, timedelta, timezone
|
2023-07-03 15:28:12 +00:00
|
|
|
from subprocess import Popen
|
2023-02-03 17:04:35 +00:00
|
|
|
|
2023-04-12 17:18:12 +00:00
|
|
|
import selfprivacy_api.services as services
|
2023-07-19 15:09:49 +00:00
|
|
|
from selfprivacy_api.services import Service, get_all_services
|
2023-07-03 12:15:36 +00:00
|
|
|
|
2023-04-12 17:18:12 +00:00
|
|
|
from selfprivacy_api.services import get_service_by_id
|
2023-02-03 17:04:35 +00:00
|
|
|
from selfprivacy_api.services.test_service import DummyService
|
2023-04-10 13:22:33 +00:00
|
|
|
from selfprivacy_api.graphql.queries.providers import BackupProvider
|
2023-07-07 12:49:52 +00:00
|
|
|
from selfprivacy_api.graphql.common_types.backup import RestoreStrategy
|
2023-04-21 12:19:59 +00:00
|
|
|
from selfprivacy_api.jobs import Jobs, JobStatus
|
2023-02-22 13:35:55 +00:00
|
|
|
|
2023-07-05 13:13:30 +00:00
|
|
|
from selfprivacy_api.models.backup.snapshot import Snapshot
|
|
|
|
|
2023-07-26 11:54:17 +00:00
|
|
|
from selfprivacy_api.backup import Backups, BACKUP_PROVIDER_ENVS
|
2023-02-01 11:58:55 +00:00
|
|
|
import selfprivacy_api.backup.providers as providers
|
|
|
|
from selfprivacy_api.backup.providers import AbstractBackupProvider
|
|
|
|
from selfprivacy_api.backup.providers.backblaze import Backblaze
|
2023-07-26 11:54:17 +00:00
|
|
|
from selfprivacy_api.backup.providers.none import NoBackups
|
2023-07-03 13:28:23 +00:00
|
|
|
from selfprivacy_api.backup.util import sync
|
2023-07-03 15:28:12 +00:00
|
|
|
from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper
|
2023-07-14 11:41:03 +00:00
|
|
|
from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job
|
2023-06-19 11:09:10 +00:00
|
|
|
|
2023-07-03 12:15:36 +00:00
|
|
|
|
2023-04-19 15:09:06 +00:00
|
|
|
from selfprivacy_api.backup.tasks import start_backup, restore_snapshot
|
2023-04-10 13:22:33 +00:00
|
|
|
from selfprivacy_api.backup.storage import Storage
|
2023-04-21 12:19:59 +00:00
|
|
|
from selfprivacy_api.backup.jobs import get_backup_job
|
2023-04-10 13:22:33 +00:00
|
|
|
|
2023-02-01 11:58:55 +00:00
|
|
|
|
2023-02-03 19:09:24 +00:00
|
|
|
# Marker contents written into the dummy service's folders; checked byte-for-byte
# after a restore to prove data round-tripped.
TESTFILE_BODY = "testytest!"
TESTFILE_2_BODY = "testissimo!"
# Name of the temporary restic repository used by these tests.
REPO_NAME = "test_backup"
|
2023-02-03 19:09:24 +00:00
|
|
|
|
|
|
|
|
2023-07-26 14:26:04 +00:00
|
|
|
def prepare_localfile_backups(temp_dir):
    """Point Backups at a fresh localfile repo inside *temp_dir*."""
    repo_location = path.join(temp_dir, "totallyunrelated")
    # The directory must not pre-exist; the backend creates it on init.
    assert not path.exists(repo_location)
    Backups.set_localfile_repo(repo_location)
|
|
|
|
|
|
|
|
|
2023-02-20 16:09:01 +00:00
|
|
|
@pytest.fixture(scope="function")
def backups_local(tmpdir):
    """Fixture: Backups on a local file repo under tmpdir, jobs cleared, repo initted."""
    Backups.reset()
    prepare_localfile_backups(tmpdir)
    Jobs.reset()
    Backups.init_repo()
|
2023-03-29 11:15:38 +00:00
|
|
|
|
2023-03-13 19:03:41 +00:00
|
|
|
|
2023-07-26 14:26:04 +00:00
|
|
|
@pytest.fixture(scope="function")
def backups(tmpdir):
    """Provider-agnostic backups fixture.

    Uses the provider configured via BACKUP_PROVIDER_ENVS when the test run
    supplies those environment variables, otherwise falls back to a local
    file repo under tmpdir.  Erases the repo on teardown.
    """
    # for those tests that are supposed to pass with any repo
    Backups.reset()
    if BACKUP_PROVIDER_ENVS["kind"] in os.environ.keys():
        Backups.set_provider_from_envs()
    else:
        prepare_localfile_backups(tmpdir)
    Jobs.reset()
    # assert not repo_path

    Backups.init_repo()
    yield
    # teardown: wipe the repo so env-configured remote providers start clean next time
    Backups.erase_repo()
|
2023-04-21 12:19:59 +00:00
|
|
|
|
2023-03-13 19:03:41 +00:00
|
|
|
|
|
|
|
@pytest.fixture()
def backups_backblaze(generic_userdata):
    """Backups configured from the Backblaze entry in generic user data (json config kept)."""
    Backups.reset(reset_json=False)
|
2023-02-20 16:09:01 +00:00
|
|
|
|
|
|
|
|
2023-02-03 17:04:35 +00:00
|
|
|
@pytest.fixture()
def raw_dummy_service(tmpdir):
    """A DummyService subclass whose two folders live under tmpdir.

    Each folder receives one distinct marker file so backups and restores
    have content to verify.  The service is NOT registered anywhere --
    see the dummy_service fixture for that.
    """
    service_dirs = []
    for dirname in ["test_service", "also_test_service"]:
        service_dir = path.join(tmpdir, dirname)
        makedirs(service_dir)
        service_dirs.append(service_dir)

    # one marker file per folder, with distinct contents
    markers = [("testfile.txt", TESTFILE_BODY), ("testfile2.txt", TESTFILE_2_BODY)]
    for folder, (filename, body) in zip(service_dirs, markers):
        with open(path.join(folder, filename), "w") as marker:
            marker.write(body)

    # we need this to not change get_folders() much
    class TestDummyService(DummyService, folders=service_dirs):
        pass

    return TestDummyService()
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture()
def dummy_service(tmpdir, backups, raw_dummy_service) -> Service:
    """raw_dummy_service registered in the global service list, unregistered on teardown."""
    service = raw_dummy_service

    # register our service
    services.services.append(service)

    assert get_service_by_id(service.get_id()) is not None
    yield service

    # cleanup because apparently it matters wrt tasks
    services.services.remove(service)
|
2023-02-03 17:04:35 +00:00
|
|
|
|
|
|
|
|
2023-02-03 18:49:24 +00:00
|
|
|
@pytest.fixture()
def memory_backup() -> AbstractBackupProvider:
    """An in-memory backup provider with blank credentials."""
    provider_class = providers.get_provider(BackupProvider.MEMORY)
    assert provider_class is not None
    provider = provider_class(login="", key="")
    assert provider is not None
    return provider
|
|
|
|
|
|
|
|
|
2023-02-20 11:50:52 +00:00
|
|
|
@pytest.fixture()
def file_backup(tmpdir) -> AbstractBackupProvider:
    """A file-based backup provider rooted at tmpdir/test_repo."""
    repo_location = path.join(tmpdir, "test_repo")
    provider_class = providers.get_provider(BackupProvider.FILE)
    assert provider_class is not None
    provider = provider_class(location=repo_location)
    assert provider is not None
    return provider
|
|
|
|
|
|
|
|
|
2023-03-10 14:14:41 +00:00
|
|
|
def test_config_load(generic_userdata):
    """The Backblaze provider is loaded from json config with its credentials intact."""
    Backups.reset(reset_json=False)
    provider = Backups.provider()

    assert provider is not None
    assert isinstance(provider, Backblaze)
    assert provider.login == "ID"
    assert provider.key == "KEY"
    assert provider.location == "selfprivacy"

    # the credentials must also be propagated down into the backupper
    assert provider.backupper.account == "ID"
    assert provider.backupper.key == "KEY"
|
|
|
|
|
|
|
|
|
|
|
|
def test_reset_sets_to_none1():
    """A bare reset degrades the active provider to NoBackups."""
    Backups.reset()
    active = Backups.provider()
    assert active is not None
    assert isinstance(active, NoBackups)
|
|
|
|
|
|
|
|
|
|
|
|
def test_reset_sets_to_none2(backups):
    """Reset degrades an already-configured provider to NoBackups too."""
    # now with something set up first^^^
    Backups.reset()
    active = Backups.provider()
    assert active is not None
    assert isinstance(active, NoBackups)
|
|
|
|
|
|
|
|
|
|
|
|
def test_setting_from_envs(tmpdir):
    """set_provider_from_envs() builds a Backblaze provider from env vars.

    Any pre-existing BACKUP_PROVIDER_ENVS values are stashed first and
    restored afterwards (or deleted, if this test introduced them).  The
    cleanup runs in a ``finally`` so a failing assertion can no longer
    leak test credentials into the environment of later tests.
    """
    Backups.reset()
    environment_stash = {}
    if BACKUP_PROVIDER_ENVS["kind"] in os.environ.keys():
        # we are running under special envs, stash them before rewriting them
        for key in BACKUP_PROVIDER_ENVS.values():
            environment_stash[key] = os.environ[key]

    try:
        os.environ[BACKUP_PROVIDER_ENVS["kind"]] = "BACKBLAZE"
        os.environ[BACKUP_PROVIDER_ENVS["login"]] = "ID"
        os.environ[BACKUP_PROVIDER_ENVS["key"]] = "KEY"
        os.environ[BACKUP_PROVIDER_ENVS["location"]] = "selfprivacy"
        Backups.set_provider_from_envs()
        provider = Backups.provider()

        assert provider is not None
        assert isinstance(provider, Backblaze)
        assert provider.login == "ID"
        assert provider.key == "KEY"
        assert provider.location == "selfprivacy"

        # credentials must reach the backupper as well
        assert provider.backupper.account == "ID"
        assert provider.backupper.key == "KEY"
    finally:
        # previously this cleanup was skipped whenever an assertion above failed
        if environment_stash != {}:
            for key in BACKUP_PROVIDER_ENVS.values():
                os.environ[key] = environment_stash[key]
        else:
            for key in BACKUP_PROVIDER_ENVS.values():
                del os.environ[key]
|
|
|
|
|
2023-03-10 14:14:41 +00:00
|
|
|
|
2023-06-16 15:19:22 +00:00
|
|
|
def test_json_reset(generic_userdata):
    """reset() wipes the json-backed provider config; reset_json=False preserves it."""
    Backups.reset(reset_json=False)
    provider = Backups.provider()
    assert provider is not None
    assert isinstance(provider, Backblaze)
    assert provider.login == "ID"
    assert provider.key == "KEY"
    assert provider.location == "selfprivacy"

    # full reset: provider degrades to a blank one
    Backups.reset()
    provider = Backups.provider()
    assert provider is not None
    assert isinstance(provider, AbstractBackupProvider)
    assert provider.login == ""
    assert provider.key == ""
    assert provider.location == ""
    assert provider.repo_id == ""
|
|
|
|
|
|
|
|
|
2023-02-01 11:58:55 +00:00
|
|
|
def test_select_backend():
    """The provider registry maps BACKBLAZE to the Backblaze class itself."""
    backend = providers.get_provider(BackupProvider.BACKBLAZE)
    assert backend is not None
    assert backend == Backblaze
|
2023-02-03 17:04:35 +00:00
|
|
|
|
|
|
|
|
2023-02-20 11:50:52 +00:00
|
|
|
def test_file_backend_init(file_backup):
    """Initting a fresh file-backed repo must not raise."""
    backupper = file_backup.backupper
    backupper.init()
|
2023-02-20 11:50:52 +00:00
|
|
|
|
|
|
|
|
2023-07-26 16:45:08 +00:00
|
|
|
def test_reinit_after_purge(backups):
    """Erasing the repo deinitializes it; a fresh init yields an empty repo again."""
    assert Backups.is_initted() is True

    Backups.erase_repo()
    assert Backups.is_initted() is False
    # listing snapshots of an erased repo is an error, not an empty list
    with pytest.raises(ValueError):
        Backups.get_all_snapshots()

    Backups.init_repo()
    assert Backups.is_initted() is True
    assert len(Backups.get_all_snapshots()) == 0
|
|
|
|
|
|
|
|
|
2023-02-20 16:09:01 +00:00
|
|
|
def test_backup_simple_file(raw_dummy_service, file_backup):
    """Smoke-test wiring of the file backend fixtures.

    NOTE(review): marked "temporarily incomplete" by the author -- it only
    inits the repo and never actually backs anything up; `name` is computed
    but not yet used.
    """
    # temporarily incomplete
    service = raw_dummy_service
    assert service is not None
    assert file_backup is not None

    name = service.get_id()
    file_backup.backupper.init()
|
2023-02-08 15:14:08 +00:00
|
|
|
|
|
|
|
|
2023-02-20 16:09:01 +00:00
|
|
|
def test_backup_service(dummy_service, backups):
    """A full backup run records a finished job and a fresh last-backup timestamp."""
    id = dummy_service.get_id()
    assert_job_finished(f"services.{id}.backup", count=0)
    assert Backups.get_last_backed_up(dummy_service) is None

    Backups.back_up(dummy_service)

    now = datetime.now(timezone.utc)
    date = Backups.get_last_backed_up(dummy_service)
    assert date is not None
    # the recorded timestamp must lie in the immediate past
    assert now > date
    assert now - date < timedelta(minutes=1)

    assert_job_finished(f"services.{id}.backup", count=1)
|
|
|
|
|
2023-02-13 11:16:35 +00:00
|
|
|
|
2023-02-17 15:55:19 +00:00
|
|
|
def test_no_repo(memory_backup):
    """Snapshot listing without an initialized repo must raise ValueError."""
    backupper = memory_backup.backupper
    with pytest.raises(ValueError):
        assert backupper.get_snapshots() == []
|
2023-02-17 15:55:19 +00:00
|
|
|
|
|
|
|
|
2023-02-22 10:25:51 +00:00
|
|
|
def test_one_snapshot(backups, dummy_service):
    """One back_up() call produces exactly one snapshot tagged with the service id."""
    Backups.back_up(dummy_service)

    snapshots = Backups.get_snapshots(dummy_service)
    assert len(snapshots) == 1
    assert snapshots[0].service_name == dummy_service.get_id()
|
2023-02-22 15:58:36 +00:00
|
|
|
|
|
|
|
|
2023-04-03 17:23:16 +00:00
|
|
|
def test_backup_returns_snapshot(backups, dummy_service):
    """backupper.start_backup returns a fully populated Snapshot model."""
    service_folders = dummy_service.get_folders()
    provider = Backups.provider()
    name = dummy_service.get_id()
    snapshot = provider.backupper.start_backup(service_folders, name)

    assert snapshot.id is not None
    # id length must match what the repo reports (short vs full hash consistency)
    assert len(snapshot.id) == len(Backups.get_all_snapshots()[0].id)
    assert Backups.get_snapshot_by_id(snapshot.id) is not None
    assert snapshot.service_name == name
    assert snapshot.created_at is not None
|
|
|
|
|
|
|
|
|
2023-07-03 12:54:43 +00:00
|
|
|
def folder_files(folder):
    """Return the full paths of the direct entries of *folder*.

    The previous ``if filename is not None`` filter was a no-op:
    os.listdir never yields None.
    """
    return [path.join(folder, filename) for filename in listdir(folder)]
|
|
|
|
|
|
|
|
|
2023-04-19 15:09:06 +00:00
|
|
|
def service_files(service):
    """Flat list of every file in every folder of *service*."""
    return [
        filepath
        for folder in service.get_folders()
        for filepath in folder_files(folder)
    ]
|
|
|
|
|
|
|
|
|
2023-02-22 15:58:36 +00:00
|
|
|
def test_restore(backups, dummy_service):
    """Deleting service files and restoring from a snapshot brings back exact contents."""
    paths_to_nuke = service_files(dummy_service)
    contents = []

    # remember original contents to compare after the restore
    for service_file in paths_to_nuke:
        with open(service_file, "r") as file:
            contents.append(file.read())

    Backups.back_up(dummy_service)
    snap = Backups.get_snapshots(dummy_service)[0]
    assert snap is not None

    # nuke every file the service owns
    for p in paths_to_nuke:
        assert path.exists(p)
        remove(p)
        assert not path.exists(p)

    Backups._restore_service_from_snapshot(dummy_service, snap.id)
    for p, content in zip(paths_to_nuke, contents):
        assert path.exists(p)
        with open(p, "r") as file:
            assert file.read() == content
|
2023-02-22 18:48:08 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_sizing(backups, dummy_service):
    """A snapshot of a non-empty service reports a positive restored size."""
    Backups.back_up(dummy_service)
    snapshot = Backups.get_snapshots(dummy_service)[0]
    restored_size = Backups.snapshot_restored_size(snapshot.id)
    assert restored_size is not None
    assert restored_size > 0
|
2023-03-13 19:03:41 +00:00
|
|
|
|
|
|
|
|
2023-07-26 14:26:04 +00:00
|
|
|
def test_init_tracking(backups, tmpdir):
    """is_initted() follows reset and re-init of the repo."""
    assert Backups.is_initted() is True
    Backups.reset()
    assert Backups.is_initted() is False
    # re-point at an untouched directory so init starts from scratch
    separate_dir = tmpdir / "out_of_the_way"
    prepare_localfile_backups(separate_dir)
    Backups.init_repo()

    assert Backups.is_initted() is True
|
2023-03-29 11:45:52 +00:00
|
|
|
|
|
|
|
|
2023-05-17 20:02:21 +00:00
|
|
|
def finished_jobs():
    """All jobs whose status is FINISHED."""
    return [
        j
        for j in Jobs.get_jobs()
        if j.status is JobStatus.FINISHED
    ]
|
|
|
|
|
|
|
|
|
2023-04-24 16:50:22 +00:00
|
|
|
def assert_job_finished(job_type, count):
    """Assert exactly *count* finished jobs of type *job_type* exist."""
    matching = sum(1 for job in finished_jobs() if job.type_id == job_type)
    assert matching == count
|
|
|
|
|
|
|
|
|
2023-05-08 12:43:11 +00:00
|
|
|
def assert_job_has_run(job_type):
    """Assert a finished job of *job_type* exists and actually passed through RUNNING."""
    matching = [job for job in finished_jobs() if job.type_id == job_type]
    # fail with a clear message instead of a bare IndexError when no such job exists
    assert matching != [], f"no finished job of type {job_type}"
    job = matching[0]
    assert JobStatus.RUNNING in Jobs.status_updates(job)
|
|
|
|
|
|
|
|
|
2023-06-28 11:45:07 +00:00
|
|
|
def job_progress_updates(job_type):
    """Progress updates of the first finished job with *job_type*."""
    matching = [j for j in finished_jobs() if j.type_id == job_type]
    return Jobs.progress_updates(matching[0])
|
|
|
|
|
|
|
|
|
|
|
|
def assert_job_had_progress(job_type):
    """Assert the finished job of *job_type* reported at least one progress update."""
    updates = job_progress_updates(job_type)
    assert len(updates) > 0
|
|
|
|
|
|
|
|
|
|
|
|
def make_large_file(path: str, bytes: int):
    """Fill the file at *path* with *bytes* random bytes.

    NOTE(review): the parameters shadow the module alias ``path`` and the
    builtin ``bytes``; renaming them would change the keyword interface,
    so they are left as-is.
    """
    with open(path, "wb") as file:
        file.write(urandom(bytes))
|
2023-05-17 20:09:29 +00:00
|
|
|
|
|
|
|
|
2023-06-05 11:19:01 +00:00
|
|
|
def test_snapshots_by_id(backups, dummy_service):
    """get_snapshot_by_id finds a specific snapshot among several."""
    # only the middle snapshot is looked up; the first and third are filler
    # (their return values were previously bound to unused locals)
    Backups.back_up(dummy_service)
    snap2 = Backups.back_up(dummy_service)
    Backups.back_up(dummy_service)

    assert snap2.id is not None
    assert snap2.id != ""

    assert len(Backups.get_snapshots(dummy_service)) == 3
    assert Backups.get_snapshot_by_id(snap2.id).id == snap2.id
|
|
|
|
|
|
|
|
|
2023-07-12 16:43:26 +00:00
|
|
|
@pytest.fixture(params=["instant_server_stop", "delayed_server_stop"])
def simulated_service_stopping_delay(request) -> float:
    """Parametrized stop delay: 0.0 s (instant) or 0.3 s (delayed)."""
    return 0.0 if request.param == "instant_server_stop" else 0.3
|
|
|
|
|
|
|
|
|
|
|
|
def test_backup_service_task(backups, dummy_service, simulated_service_stopping_delay):
    """The backup task produces one snapshot and a finished job that ran with progress."""
    dummy_service.set_delay(simulated_service_stopping_delay)

    handle = start_backup(dummy_service)
    handle(blocking=True)

    snaps = Backups.get_snapshots(dummy_service)
    assert len(snaps) == 1

    id = dummy_service.get_id()
    job_type_id = f"services.{id}.backup"
    assert_job_finished(job_type_id, count=1)
    assert_job_has_run(job_type_id)
    assert_job_had_progress(job_type_id)
|
2023-04-21 12:19:59 +00:00
|
|
|
|
2023-04-03 21:59:14 +00:00
|
|
|
|
2023-07-05 13:13:30 +00:00
|
|
|
def test_forget_snapshot(backups, dummy_service):
    """forget_snapshot removes a snapshot from both the repo and the cache."""
    snap1 = Backups.back_up(dummy_service)
    snap2 = Backups.back_up(dummy_service)
    assert len(Backups.get_snapshots(dummy_service)) == 2

    Backups.forget_snapshot(snap2)
    assert len(Backups.get_snapshots(dummy_service)) == 1
    # a forced cache reload must agree with the cached view
    Backups.force_snapshot_cache_reload()
    assert len(Backups.get_snapshots(dummy_service)) == 1

    assert Backups.get_snapshots(dummy_service)[0].id == snap1.id

    Backups.forget_snapshot(snap1)
    assert len(Backups.get_snapshots(dummy_service)) == 0
|
|
|
|
|
|
|
|
|
|
|
|
def test_forget_nonexistent_snapshot(backups, dummy_service):
    """Forgetting a snapshot the repo never held must raise ValueError."""
    phantom = Snapshot(
        id="gibberjibber",
        service_name="nohoho",
        created_at=datetime.now(timezone.utc),
    )
    with pytest.raises(ValueError):
        Backups.forget_snapshot(phantom)
|
|
|
|
|
|
|
|
|
2023-06-28 11:45:07 +00:00
|
|
|
def test_backup_larger_file(backups, dummy_service):
    """Back up a ~100 MB file and check that progress was reported gradually."""
    # renamed from `dir`, which shadowed the builtin
    large_file_path = path.join(dummy_service.get_folders()[0], "LARGEFILE")
    mega = 2**20
    make_large_file(large_file_path, 100 * mega)

    try:
        handle = start_backup(dummy_service)
        handle(blocking=True)

        # results will be slightly different on different machines. if someone has troubles with it on their machine, consider dropping this test.
        id = dummy_service.get_id()
        job_type_id = f"services.{id}.backup"
        assert_job_finished(job_type_id, count=1)
        assert_job_has_run(job_type_id)
        updates = job_progress_updates(job_type_id)
        assert len(updates) > 3
        # the midpoint of the run should already report substantial progress
        assert updates[int((len(updates) - 1) / 2.0)] > 10
    finally:
        # clean up the large file even when an assertion above fails
        remove(large_file_path)
|
2023-06-28 11:45:07 +00:00
|
|
|
|
|
|
|
|
2023-07-07 12:49:52 +00:00
|
|
|
@pytest.fixture(params=["verify", "inplace"])
def restore_strategy(request) -> RestoreStrategy:
    """Parametrized restore strategy: download-verify-overwrite or in-place."""
    if request.param == "verify":
        strategy = RestoreStrategy.DOWNLOAD_VERIFY_OVERWRITE
    else:
        strategy = RestoreStrategy.INPLACE
    return strategy
|
|
|
|
|
|
|
|
|
2023-07-12 16:53:49 +00:00
|
|
|
def test_restore_snapshot_task(
    backups, dummy_service, restore_strategy, simulated_service_stopping_delay
):
    """End-to-end restore via the task API, under both restore strategies."""
    dummy_service.set_delay(simulated_service_stopping_delay)

    Backups.back_up(dummy_service)
    snaps = Backups.get_snapshots(dummy_service)
    assert len(snaps) == 1

    paths_to_nuke = service_files(dummy_service)
    contents = []

    # remember contents, then delete all service files
    for service_file in paths_to_nuke:
        with open(service_file, "r") as file:
            contents.append(file.read())

    for p in paths_to_nuke:
        remove(p)

    handle = restore_snapshot(snaps[0], restore_strategy)
    handle(blocking=True)

    for p, content in zip(paths_to_nuke, contents):
        assert path.exists(p)
        with open(p, "r") as file:
            assert file.read() == content

    snaps = Backups.get_snapshots(dummy_service)
    # the INPLACE strategy takes an extra safety snapshot before restoring
    if restore_strategy == RestoreStrategy.INPLACE:
        assert len(snaps) == 2
    else:
        assert len(snaps) == 1
|
2023-07-07 12:49:52 +00:00
|
|
|
|
2023-04-19 15:09:06 +00:00
|
|
|
|
2023-04-03 22:39:04 +00:00
|
|
|
def test_set_autobackup_period(backups):
    """Zero and negative periods disable autobackup; positive ones enable it."""
    assert Backups.autobackup_period_minutes() is None

    Backups.set_autobackup_period_minutes(2)
    assert Backups.autobackup_period_minutes() == 2

    Backups.disable_all_autobackup()
    assert Backups.autobackup_period_minutes() is None

    Backups.set_autobackup_period_minutes(3)
    assert Backups.autobackup_period_minutes() == 3

    # zero disables
    Backups.set_autobackup_period_minutes(0)
    assert Backups.autobackup_period_minutes() is None

    Backups.set_autobackup_period_minutes(3)
    assert Backups.autobackup_period_minutes() == 3

    # a negative period disables as well
    Backups.set_autobackup_period_minutes(-1)
    assert Backups.autobackup_period_minutes() is None
|
2023-04-10 13:22:33 +00:00
|
|
|
|
|
|
|
|
2023-04-10 15:51:54 +00:00
|
|
|
def test_no_default_autobackup(backups, dummy_service):
    """With no period configured, nothing is ever due for autobackup."""
    current_time = datetime.now(timezone.utc)
    assert not Backups.is_time_to_backup_service(dummy_service, current_time)
    assert not Backups.is_time_to_backup(current_time)
|
|
|
|
|
|
|
|
|
2023-07-19 15:09:49 +00:00
|
|
|
def backuppable_services() -> list[Service]:
    """Subset of registered services that report can_be_backed_up()."""
    return [s for s in get_all_services() if s.can_be_backed_up()]
|
|
|
|
|
|
|
|
|
|
|
|
def test_services_to_back_up(backups, dummy_service):
    """services_to_back_up is empty until a period is set, then covers all backuppable services."""
    backup_period = 13  # minutes
    now = datetime.now(timezone.utc)

    # the local result is named `to_back_up` -- it previously shadowed the
    # module alias `services` imported at the top of the file
    dummy_service.set_backuppable(False)
    to_back_up = Backups.services_to_back_up(now)
    assert len(to_back_up) == 0

    dummy_service.set_backuppable(True)

    # still nothing: no autobackup period configured yet
    to_back_up = Backups.services_to_back_up(now)
    assert len(to_back_up) == 0

    Backups.set_autobackup_period_minutes(backup_period)

    to_back_up = Backups.services_to_back_up(now)
    assert len(to_back_up) == len(backuppable_services())
    assert dummy_service.get_id() in [
        service.get_id() for service in backuppable_services()
    ]
|
|
|
|
|
|
|
|
|
2023-04-10 15:51:54 +00:00
|
|
|
def test_autobackup_timer_periods(backups, dummy_service):
    """Setting and zeroing the autobackup period toggles is_time_to_backup*."""
    now = datetime.now(timezone.utc)
    backup_period = 13  # minutes

    assert not Backups.is_time_to_backup_service(dummy_service, now)
    assert not Backups.is_time_to_backup(now)

    Backups.set_autobackup_period_minutes(backup_period)
    assert Backups.is_time_to_backup_service(dummy_service, now)
    assert Backups.is_time_to_backup(now)

    # zero period means disabled again
    Backups.set_autobackup_period_minutes(0)
    assert not Backups.is_time_to_backup_service(dummy_service, now)
    assert not Backups.is_time_to_backup(now)
|
|
|
|
|
|
|
|
|
|
|
|
def test_autobackup_timer_enabling(backups, dummy_service):
    """A service's backuppable flag gates autobackup independently of the period."""
    now = datetime.now(timezone.utc)
    backup_period = 13  # minutes
    dummy_service.set_backuppable(False)

    Backups.set_autobackup_period_minutes(backup_period)
    assert Backups.is_time_to_backup(
        now
    )  # there are other services too, not just our dummy

    # not backuppable service is not backuppable even if period is set
    assert not Backups.is_time_to_backup_service(dummy_service, now)

    dummy_service.set_backuppable(True)
    assert dummy_service.can_be_backed_up()
    assert Backups.is_time_to_backup_service(dummy_service, now)

    # disabling autobackup globally overrides everything
    Backups.disable_all_autobackup()
    assert not Backups.is_time_to_backup_service(dummy_service, now)
    assert not Backups.is_time_to_backup(now)
|
|
|
|
|
|
|
|
|
|
|
|
def test_autobackup_timing(backups, dummy_service):
    """After a backup, a service is due again only once the period has elapsed."""
    backup_period = 13  # minutes
    now = datetime.now(timezone.utc)

    Backups.set_autobackup_period_minutes(backup_period)
    assert Backups.is_time_to_backup_service(dummy_service, now)
    assert Backups.is_time_to_backup(now)

    Backups.back_up(dummy_service)

    # freshly backed up: not due at the current time
    now = datetime.now(timezone.utc)
    assert not Backups.is_time_to_backup_service(dummy_service, now)

    # a timestamp shortly before the backup is not due either
    past = datetime.now(timezone.utc) - timedelta(minutes=1)
    assert not Backups.is_time_to_backup_service(dummy_service, past)

    # past the configured period, the service is due again
    future = datetime.now(timezone.utc) + timedelta(minutes=backup_period + 2)
    assert Backups.is_time_to_backup_service(dummy_service, future)
|
2023-04-10 15:51:54 +00:00
|
|
|
|
|
|
|
|
2023-04-10 13:22:33 +00:00
|
|
|
# Storage
|
|
|
|
def test_snapshots_caching(backups, dummy_service):
    """get_snapshots serves from the redis cache and repopulates it when emptied."""
    Backups.back_up(dummy_service)

    # we test indirectly that we do redis calls instead of shell calls
    start = datetime.now()
    for i in range(10):
        snapshots = Backups.get_snapshots(dummy_service)
        assert len(snapshots) == 1
    # ten cached lookups must be far faster than ten restic invocations would be
    assert datetime.now() - start < timedelta(seconds=0.5)

    cached_snapshots = Storage.get_cached_snapshots()
    assert len(cached_snapshots) == 1

    Storage.delete_cached_snapshot(cached_snapshots[0])
    cached_snapshots = Storage.get_cached_snapshots()
    assert len(cached_snapshots) == 0

    # a normal query refills the cache from the repo
    snapshots = Backups.get_snapshots(dummy_service)
    assert len(snapshots) == 1
    cached_snapshots = Storage.get_cached_snapshots()
    assert len(cached_snapshots) == 1
|
|
|
|
|
|
|
|
|
2023-07-26 10:09:27 +00:00
|
|
|
def lowlevel_forget(snapshot_id):
    """Forget a snapshot directly via the backupper, bypassing Backups' cache logic."""
    backupper = Backups.provider().backupper
    backupper.forget_snapshot(snapshot_id)
|
|
|
|
|
|
|
|
|
|
|
|
# Storage
|
|
|
|
def test_snapshots_cache_invalidation(backups, dummy_service):
    """Cache invalidation and forced reload keep the cache consistent with the repo."""
    Backups.back_up(dummy_service)
    cached_snapshots = Storage.get_cached_snapshots()
    assert len(cached_snapshots) == 1

    Storage.invalidate_snapshot_storage()
    cached_snapshots = Storage.get_cached_snapshots()
    assert len(cached_snapshots) == 0

    Backups.force_snapshot_cache_reload()
    cached_snapshots = Storage.get_cached_snapshots()
    assert len(cached_snapshots) == 1
    snap = cached_snapshots[0]

    # forgetting behind Backups' back leaves the cache stale...
    lowlevel_forget(snap.id)
    cached_snapshots = Storage.get_cached_snapshots()
    assert len(cached_snapshots) == 1

    # ...until a forced reload notices the snapshot is gone
    Backups.force_snapshot_cache_reload()
    cached_snapshots = Storage.get_cached_snapshots()
    assert len(cached_snapshots) == 0
|
|
|
|
|
|
|
|
|
2023-04-10 13:22:33 +00:00
|
|
|
# Storage
|
|
|
|
def test_init_tracking_caching(backups, raw_dummy_service):
    """The init mark in Storage is the source of truth for is_initted()."""
    assert Storage.has_init_mark() is True
    Backups.reset()
    assert Storage.has_init_mark() is False

    Storage.mark_as_init()

    assert Storage.has_init_mark() is True
    assert Backups.is_initted() is True
|
2023-04-10 13:22:33 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Storage
|
2023-07-26 14:26:04 +00:00
|
|
|
def test_init_tracking_caching2(backups, tmpdir):
    """Re-pointing at a fresh localfile repo sets no init mark until init_repo()."""
    assert Storage.has_init_mark() is True
    Backups.reset()
    assert Storage.has_init_mark() is False
    # point at an untouched directory: still not initted
    separate_dir = tmpdir / "out_of_the_way"
    prepare_localfile_backups(separate_dir)
    assert Storage.has_init_mark() is False

    Backups.init_repo()

    assert Storage.has_init_mark() is True
|
2023-04-10 13:22:33 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Storage
|
|
|
|
def test_provider_storage(backups_backblaze):
    """A provider survives a round-trip through redis storage."""
    provider = Backups.provider()

    assert provider is not None

    assert isinstance(provider, Backblaze)
    assert provider.login == "ID"
    assert provider.key == "KEY"

    Storage.store_provider(provider)
    restored_provider = Backups._load_provider_redis()
    # class and credentials must be reconstructed intact
    assert isinstance(restored_provider, Backblaze)
    assert restored_provider.login == "ID"
    assert restored_provider.key == "KEY"
|
2023-04-12 17:18:12 +00:00
|
|
|
|
|
|
|
|
2023-07-03 12:15:36 +00:00
|
|
|
def test_sync(dummy_service):
    """sync() makes dst mirror src without touching src."""
    folders = dummy_service.get_folders()
    src, dst = folders[0], folders[1]
    before_src = set(listdir(src))
    before_dst = set(listdir(dst))
    # the two folders start out with different marker files
    assert before_src != before_dst

    sync(src, dst)

    assert set(listdir(src)) == before_src
    assert set(listdir(dst)) == before_src
|
|
|
|
|
|
|
|
|
|
|
|
def test_sync_nonexistent_src(dummy_service):
    """Syncing from a path that does not exist raises ValueError."""
    missing_src = "/var/lib/nonexistentFluffyBunniesOfUnix"
    destination = dummy_service.get_folders()[1]

    with pytest.raises(ValueError):
        sync(missing_src, destination)
|
2023-07-03 15:28:12 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Restic lowlevel
|
|
|
|
def test_mount_umount(backups, dummy_service, tmpdir):
    """Mounting the repo exposes snapshot data; unmounting empties the mountpoint."""
    Backups.back_up(dummy_service)
    backupper = Backups.provider().backupper
    assert isinstance(backupper, ResticBackupper)

    mountpoint = tmpdir / "mount"
    makedirs(mountpoint)
    assert path.exists(mountpoint)
    assert len(listdir(mountpoint)) == 0

    # mount_repo's returned process handle was unused (its terminate() call was
    # commented out); unmount_repo does the actual teardown
    backupper.mount_repo(mountpoint)
    assert len(listdir(mountpoint)) != 0

    backupper.unmount_repo(mountpoint)
    assert len(listdir(mountpoint)) == 0
|
2023-07-14 11:41:03 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_move_blocks_backups(backups, dummy_service, restore_strategy):
    """A running move job for a service blocks both backup and restore of it."""
    snap = Backups.back_up(dummy_service)
    # the job only needs to exist in RUNNING state; its return value is not used
    Jobs.add(
        type_id=f"services.{dummy_service.get_id()}.move",
        name="Move Dummy",
        # plain string: there is nothing to interpolate (was a placeholder-less f-string)
        description="Moving Dummy data to the Rainbow Land",
        status=JobStatus.RUNNING,
    )

    with pytest.raises(ValueError):
        Backups.back_up(dummy_service)

    with pytest.raises(ValueError):
        Backups.restore_snapshot(snap, restore_strategy)
|
2023-08-07 13:33:10 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_double_lock_unlock(backups, dummy_service):
    """Locking twice raises ValueError; unlocking is idempotent."""
    # notice that introducing stale locks is only safe for other tests if we erase repo in between
    # which we do at the time of writing this test

    Backups.provider().backupper.lock()
    with pytest.raises(ValueError):
        Backups.provider().backupper.lock()

    Backups.provider().backupper.unlock()
    Backups.provider().backupper.lock()

    # double unlock must not raise
    Backups.provider().backupper.unlock()
    Backups.provider().backupper.unlock()
|
2023-08-09 13:47:18 +00:00
|
|
|
|
|
|
|
|
|
|
|
def test_operations_while_locked(backups, dummy_service):
    """Backing up must still succeed while a stale lock is held on the repo."""
    backupper = Backups.provider().backupper
    backupper.lock()
    snapshot = Backups.back_up(dummy_service)
    assert snapshot is not None
|