Mirror of https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api.git (synced 2024-11-05 11:03:12 +00:00)
Merge branch 'master' into fix-restores-wrt-service-status
commit ada89a2494
@@ -9,8 +9,8 @@ from typing import List, TypeVar, Callable
 from collections.abc import Iterable
 from json.decoder import JSONDecodeError
 from os.path import exists, join
-from os import listdir
-from time import sleep
+from os import mkdir
+from shutil import rmtree

 from selfprivacy_api.backup.util import output_yielder, sync
 from selfprivacy_api.backup.backuppers import AbstractBackupper
@@ -32,12 +32,12 @@ def unlocked_repo(func: T) -> T:
     def inner(self: ResticBackupper, *args, **kwargs):
         try:
             return func(self, *args, **kwargs)
-        except Exception as e:
-            if "unable to create lock" in str(e):
+        except Exception as error:
+            if "unable to create lock" in str(error):
                 self.unlock()
                 return func(self, *args, **kwargs)
             else:
-                raise e
+                raise error

     # Above, we manually guarantee that the type returned is compatible.
     return inner  # type: ignore
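For readers skimming the diff: the unlocked_repo decorator above retries a repository operation exactly once after clearing a stale restic lock. A minimal, self-contained sketch of that pattern, with DummyRepo as an illustrative stand-in for ResticBackupper (not repository code):

from typing import Callable, TypeVar

T = TypeVar("T", bound=Callable)


def unlocked_repo(func: T) -> T:
    """Retry a repo operation once if it failed because of a stale lock."""

    def inner(self, *args, **kwargs):
        try:
            return func(self, *args, **kwargs)
        except Exception as error:
            if "unable to create lock" in str(error):
                self.unlock()
                # retry exactly once after dropping the stale lock
                return func(self, *args, **kwargs)
            else:
                raise error

    return inner  # type: ignore


class DummyRepo:
    """Illustrative stand-in for ResticBackupper."""

    def __init__(self) -> None:
        self.locked = True

    def unlock(self) -> None:
        self.locked = False

    @unlocked_repo
    def snapshots(self) -> list:
        if self.locked:
            raise RuntimeError("repo is busy: unable to create lock")
        return ["snapshot-1"]


if __name__ == "__main__":
    # The first call raises internally, the decorator unlocks and retries.
    assert DummyRepo().snapshots() == ["snapshot-1"]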
@@ -126,32 +126,6 @@ class ResticBackupper(AbstractBackupper):
             output,
         )

-    def mount_repo(self, mount_directory):
-        mount_command = self.restic_command("mount", mount_directory)
-        mount_command.insert(0, "nohup")
-        handle = subprocess.Popen(
-            mount_command,
-            stdout=subprocess.DEVNULL,
-            shell=False,
-        )
-        sleep(2)
-        if "ids" not in listdir(mount_directory):
-            raise IOError("failed to mount dir ", mount_directory)
-        return handle
-
-    def unmount_repo(self, mount_directory):
-        mount_command = ["umount", "-l", mount_directory]
-        with subprocess.Popen(
-            mount_command, stdout=subprocess.PIPE, shell=False
-        ) as handle:
-            output = handle.communicate()[0].decode("utf-8")
-            # TODO: check for exit code?
-            if "error" in output.lower():
-                return IOError("failed to unmount dir ", mount_directory, ": ", output)
-
-        if not listdir(mount_directory) == []:
-            return IOError("failed to unmount dir ", mount_directory)
-
     @staticmethod
     def __flatten_list(list_to_flatten):
         """string-aware list flattener"""
@@ -318,8 +292,8 @@ class ResticBackupper(AbstractBackupper):
                     break
                 if "unable" in line:
                     raise ValueError(line)
-        except Exception as e:
-            raise ValueError("could not lock repository") from e
+        except Exception as error:
+            raise ValueError("could not lock repository") from error

     @unlocked_repo
     def restored_size(self, snapshot_id: str) -> int:
@@ -362,20 +336,21 @@ class ResticBackupper(AbstractBackupper):
             if verify:
                 self._raw_verified_restore(snapshot_id, target=temp_dir)
                 snapshot_root = temp_dir
-            else: # attempting inplace restore via mount + sync
-                self.mount_repo(temp_dir)
-                snapshot_root = join(temp_dir, "ids", snapshot_id)
-
-            assert snapshot_root is not None
-            for folder in folders:
-                src = join(snapshot_root, folder.strip("/"))
-                if not exists(src):
-                    raise ValueError(f"No such path: {src}. We tried to find {folder}")
-                dst = folder
-                sync(src, dst)
-
-            if not verify:
-                self.unmount_repo(temp_dir)
+                for folder in folders:
+                    src = join(snapshot_root, folder.strip("/"))
+                    if not exists(src):
+                        raise ValueError(
+                            f"No such path: {src}. We tried to find {folder}"
+                        )
+                    dst = folder
+                    sync(src, dst)
+
+            else: # attempting inplace restore
+                for folder in folders:
+                    rmtree(folder)
+                    mkdir(folder)
+                self._raw_verified_restore(snapshot_id, target="/")
+                return

     def _raw_verified_restore(self, snapshot_id, target="/"):
         """barebones restic restore"""
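In plain terms, the right-hand side of this hunk drops the FUSE-mount restore path: when verify is set, the snapshot is restored into a temporary directory and each service folder is synced over its live counterpart; otherwise the live folders are wiped and restic restores straight to the filesystem root. A rough sketch of that branching, where restore_snapshot_to and the local sync below are hypothetical stand-ins for ResticBackupper._raw_verified_restore and selfprivacy_api.backup.util.sync:

from os import mkdir
from os.path import exists, join
from shutil import rmtree
from tempfile import TemporaryDirectory
from typing import List


def restore_snapshot_to(snapshot_id: str, target: str) -> None:
    """Hypothetical stand-in for the real restic restore invocation."""
    print(f"restic restore {snapshot_id} --target {target} --verify")


def sync(src: str, dst: str) -> None:
    """Hypothetical stand-in for selfprivacy_api.backup.util.sync."""
    print(f"rsync-like copy {src} -> {dst}")


def restore(snapshot_id: str, folders: List[str], verify: bool) -> None:
    with TemporaryDirectory() as temp_dir:
        if verify:
            # Restore into scratch space first, then copy each service
            # folder over its live counterpart.
            restore_snapshot_to(snapshot_id, target=temp_dir)
            for folder in folders:
                src = join(temp_dir, folder.strip("/"))
                if not exists(src):
                    raise ValueError(f"No such path: {src}")
                sync(src, folder)
        else:
            # In-place restore: recreate the live folders empty and let
            # restic write directly under the filesystem root.
            for folder in folders:
                rmtree(folder)
                mkdir(folder)
            restore_snapshot_to(snapshot_id, target="/")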
@@ -27,4 +27,4 @@ async def get_token_header(

 def get_api_version() -> str:
     """Get API version"""
-    return "2.3.0"
+    return "2.3.1"
setup.py (2 changed lines)
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages

 setup(
     name="selfprivacy_api",
-    version="2.3.0",
+    version="2.3.1",
     packages=find_packages(),
     scripts=[
         "selfprivacy_api/app.py",
@@ -8,6 +8,8 @@ from os import urandom
 from datetime import datetime, timedelta, timezone
 from subprocess import Popen

+import tempfile
+
 import selfprivacy_api.services as services
 from selfprivacy_api.services import Service, get_all_services
 from selfprivacy_api.services.service import ServiceStatus
@@ -735,25 +737,6 @@ def test_sync_nonexistent_src(dummy_service):
        sync(src, dst)


-# Restic lowlevel
-def test_mount_umount(backups, dummy_service, tmpdir):
-    Backups.back_up(dummy_service)
-    backupper = Backups.provider().backupper
-    assert isinstance(backupper, ResticBackupper)
-
-    mountpoint = tmpdir / "mount"
-    makedirs(mountpoint)
-    assert path.exists(mountpoint)
-    assert len(listdir(mountpoint)) == 0
-
-    handle = backupper.mount_repo(mountpoint)
-    assert len(listdir(mountpoint)) != 0
-
-    backupper.unmount_repo(mountpoint)
-    # handle.terminate()
-    assert len(listdir(mountpoint)) == 0
-
-
 def test_move_blocks_backups(backups, dummy_service, restore_strategy):
     snap = Backups.back_up(dummy_service)
     job = Jobs.add(
@@ -816,3 +799,10 @@ def test_operations_while_locked(backups, dummy_service):
     # check that no locks were left
     Backups.provider().backupper.lock()
     Backups.provider().backupper.unlock()
+
+
+# a paranoid check to weed out problems with tempdirs that are not dependent on us
+def test_tempfile():
+    with tempfile.TemporaryDirectory() as temp:
+        assert path.exists(temp)
+    assert not path.exists(temp)
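A side note on the new test_tempfile check: it pins down the contract the verified restore path relies on, namely that a TemporaryDirectory disappears when its with-block exits, so anything restored into it must be copied out before that point. A small illustrative sketch (not repository code; write_restore_data is a hypothetical callable standing in for a restic restore):

import tempfile
from os import path
from shutil import copytree
from typing import Callable


def restore_then_copy_out(write_restore_data: Callable[[str], None], destination: str) -> None:
    with tempfile.TemporaryDirectory() as temp_dir:
        write_restore_data(temp_dir)  # stand-in for restoring a snapshot here
        # The copy out of the scratch directory must happen inside the
        # with-block, while temp_dir still exists on disk.
        copytree(temp_dir, destination, dirs_exist_ok=True)
    # After the with-block, the scratch directory is gone.
    assert not path.exists(temp_dir)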