mirror of https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api.git
synced 2024-11-25 13:31:27 +00:00

Merge pull request 'quotas for autobackups' (#56) from quotas into master
Reviewed-on: https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api/pulls/56
Contained in commit 6b106cbcf3
@@ -4,7 +4,7 @@ This module contains the controller class for backups.
from datetime import datetime, timedelta
import os
from os import statvfs
from typing import List, Optional
from typing import Callable, List, Optional

from selfprivacy_api.utils import ReadUserData, WriteUserData
@@ -23,7 +23,12 @@ from selfprivacy_api.jobs import Jobs, JobStatus, Job
from selfprivacy_api.graphql.queries.providers import (
    BackupProvider as BackupProviderEnum,
)
from selfprivacy_api.graphql.common_types.backup import RestoreStrategy
from selfprivacy_api.graphql.common_types.backup import (
    RestoreStrategy,
    BackupReason,
    AutobackupQuotas,
)

from selfprivacy_api.models.backup.snapshot import Snapshot
@@ -70,6 +75,24 @@ class NotDeadError(AssertionError):
    """


class RotationBucket:
    """
    Bucket object used for rotation.
    Has the following mutable fields:
    - the counter, int
    - the lambda function which takes datetime and the int and returns the int
    - the last, int
    """

    def __init__(self, counter: int, last: int, rotation_lambda):
        self.counter: int = counter
        self.last: int = last
        self.rotation_lambda: Callable[[datetime, int], int] = rotation_lambda

    def __str__(self) -> str:
        return f"Bucket(counter={self.counter}, last={self.last})"
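A hedged sketch (not part of the diff) of how a bucket is consumed during rotation: `last` remembers the grouping key of the most recent snapshot group seen, and `counter` ticks down once per new group until the quota is spent.

# Illustration only: one "last" bucket with a quota of 2, walking snapshots newest-first.
bucket = RotationBucket(counter=2, last=-1, rotation_lambda=lambda _date, index: index)

for index in range(4):  # pretend indexes of four snapshots, newest first
    key = bucket.rotation_lambda(None, index)
    if key != bucket.last and bucket.counter > 0:
        bucket.last = key
        bucket.counter -= 1
        print(f"keep snapshot #{index}, {bucket}")
# Only snapshots #0 and #1 are kept; after that the counter is exhausted.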


class Backups:
    """A stateless controller class for backups"""
@@ -264,10 +287,12 @@ class Backups:
    # Backup

    @staticmethod
    def back_up(service: Service) -> Snapshot:
    def back_up(
        service: Service, reason: BackupReason = BackupReason.EXPLICIT
    ) -> Snapshot:
        """The top-level function to back up a service"""
        folders = service.get_folders()
        tag = service.get_id()
        service_name = service.get_id()

        job = get_backup_job(service)
        if job is None:
@@ -278,9 +303,13 @@ class Backups:
            service.pre_backup()
            snapshot = Backups.provider().backupper.start_backup(
                folders,
                tag,
                service_name,
                reason=reason,
            )
            Backups._store_last_snapshot(tag, snapshot)

            Backups._store_last_snapshot(service_name, snapshot)
            if reason == BackupReason.AUTO:
                Backups._prune_auto_snaps(service)
            service.post_restore()
        except Exception as error:
            Jobs.update(job, status=JobStatus.ERROR, status_text=str(error))
@@ -289,6 +318,108 @@ class Backups:
        Jobs.update(job, status=JobStatus.FINISHED)
        return snapshot

    @staticmethod
    def _auto_snaps(service):
        return [
            snap
            for snap in Backups.get_snapshots(service)
            if snap.reason == BackupReason.AUTO
        ]

    @staticmethod
    def _prune_snaps_with_quotas(snapshots: List[Snapshot]) -> List[Snapshot]:
        # Function broken out for testability
        # Sorting newest first
        sorted_snaps = sorted(snapshots, key=lambda s: s.created_at, reverse=True)
        quotas: AutobackupQuotas = Backups.autobackup_quotas()

        buckets: list[RotationBucket] = [
            RotationBucket(
                quotas.last,
                -1,
                lambda _, index: index,
            ),
            RotationBucket(
                quotas.daily,
                -1,
                lambda date, _: date.year * 10000 + date.month * 100 + date.day,
            ),
            RotationBucket(
                quotas.weekly,
                -1,
                lambda date, _: date.year * 100 + date.isocalendar()[1],
            ),
            RotationBucket(
                quotas.monthly,
                -1,
                lambda date, _: date.year * 100 + date.month,
            ),
            RotationBucket(
                quotas.yearly,
                -1,
                lambda date, _: date.year,
            ),
        ]

        new_snaplist: List[Snapshot] = []
        for i, snap in enumerate(sorted_snaps):
            keep_snap = False
            for bucket in buckets:
                if (bucket.counter > 0) or (bucket.counter == -1):
                    val = bucket.rotation_lambda(snap.created_at, i)
                    if (val != bucket.last) or (i == len(sorted_snaps) - 1):
                        bucket.last = val
                        if bucket.counter > 0:
                            bucket.counter -= 1
                        if not keep_snap:
                            new_snaplist.append(snap)
                            keep_snap = True

        return new_snaplist
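To make the bucket keys above concrete, a small hedged illustration (dates invented): snapshots whose timestamps collapse to the same key compete for one slot of that bucket, and only the newest of them is kept.

# Illustration only: the same key expressions as in the buckets above.
from datetime import datetime

a = datetime(2023, 1, 25, 10)  # Wednesday
b = datetime(2023, 1, 25, 23)  # same calendar day and ISO week as a
c = datetime(2023, 1, 29, 9)   # Sunday of the same ISO week

daily = lambda d: d.year * 10000 + d.month * 100 + d.day
weekly = lambda d: d.year * 100 + d.isocalendar()[1]

assert daily(a) == daily(b) == 20230125     # a and b share one daily slot
assert daily(c) == 20230129                 # c needs its own daily slot
assert weekly(a) == weekly(b) == weekly(c)  # all three fit in one weekly slot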

    @staticmethod
    def _prune_auto_snaps(service) -> None:
        # Not very testable by itself, so most testing is going on Backups._prune_snaps_with_quotas
        # We can still test total limits and, say, daily limits

        auto_snaps = Backups._auto_snaps(service)
        new_snaplist = Backups._prune_snaps_with_quotas(auto_snaps)

        # TODO: Can be optimized since there is forgetting of an array in one restic op
        # but most of the time this will be only one snap to forget.
        for snap in auto_snaps:
            if snap not in new_snaplist:
                Backups.forget_snapshot(snap)

    @staticmethod
    def _standardize_quotas(i: int) -> int:
        if i <= -1:
            i = -1
        return i

    @staticmethod
    def autobackup_quotas() -> AutobackupQuotas:
        """0 means do not keep, -1 means unlimited"""

        return Storage.autobackup_quotas()

    @staticmethod
    def set_autobackup_quotas(quotas: AutobackupQuotas) -> None:
        """0 means do not keep, -1 means unlimited"""

        Storage.set_autobackup_quotas(
            AutobackupQuotas(
                last=Backups._standardize_quotas(quotas.last),
                daily=Backups._standardize_quotas(quotas.daily),
                weekly=Backups._standardize_quotas(quotas.weekly),
                monthly=Backups._standardize_quotas(quotas.monthly),
                yearly=Backups._standardize_quotas(quotas.yearly),
            )
        )

        for service in get_all_services():
            Backups._prune_auto_snaps(service)
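A hedged usage sketch of the quota semantics above (values invented): negative inputs are normalized to -1 (unlimited), 0 means keep none of that kind, and changing quotas immediately re-prunes existing automatic snapshots for every service.

# Illustration only; assumes a configured backup provider and the imports below.
from selfprivacy_api.backup import Backups
from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas

Backups.set_autobackup_quotas(
    AutobackupQuotas(last=3, daily=1, weekly=0, monthly=-7, yearly=-1)
)
stored = Backups.autobackup_quotas()
assert stored.monthly == -1  # any value <= -1 is stored as "unlimited"
assert stored.weekly == 0    # 0 means "keep no weekly auto-snapshots"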

    # Restoring

    @staticmethod
@@ -309,7 +440,7 @@ class Backups:
        Jobs.update(
            job, status=JobStatus.CREATED, status_text=f"Waiting for pre-restore backup"
        )
        failsafe_snapshot = Backups.back_up(service)
        failsafe_snapshot = Backups.back_up(service, BackupReason.PRE_RESTORE)

        Jobs.update(
            job, status=JobStatus.RUNNING, status_text=f"Restoring from {snapshot.id}"
@@ -2,6 +2,7 @@ from abc import ABC, abstractmethod
from typing import List

from selfprivacy_api.models.backup.snapshot import Snapshot
from selfprivacy_api.graphql.common_types.backup import BackupReason


class AbstractBackupper(ABC):

@@ -22,7 +23,12 @@ class AbstractBackupper(ABC):
        raise NotImplementedError

    @abstractmethod
    def start_backup(self, folders: List[str], tag: str) -> Snapshot:
    def start_backup(
        self,
        folders: List[str],
        service_name: str,
        reason: BackupReason = BackupReason.EXPLICIT,
    ) -> Snapshot:
        """Start a backup of the given folders"""
        raise NotImplementedError
@@ -2,6 +2,7 @@ from typing import List

from selfprivacy_api.models.backup.snapshot import Snapshot
from selfprivacy_api.backup.backuppers import AbstractBackupper
from selfprivacy_api.graphql.common_types.backup import BackupReason


class NoneBackupper(AbstractBackupper):

@@ -13,7 +14,9 @@ class NoneBackupper(AbstractBackupper):
    def set_creds(self, account: str, key: str, repo: str):
        pass

    def start_backup(self, folders: List[str], tag: str):
    def start_backup(
        self, folders: List[str], tag: str, reason: BackupReason = BackupReason.EXPLICIT
    ):
        raise NotImplementedError

    def get_snapshots(self) -> List[Snapshot]:
@@ -5,13 +5,14 @@ import json
import datetime
import tempfile

from typing import List, TypeVar, Callable
from typing import List, Optional, TypeVar, Callable
from collections.abc import Iterable
from json.decoder import JSONDecodeError
from os.path import exists, join
from os import mkdir
from shutil import rmtree

from selfprivacy_api.graphql.common_types.backup import BackupReason
from selfprivacy_api.backup.util import output_yielder, sync
from selfprivacy_api.backup.backuppers import AbstractBackupper
from selfprivacy_api.models.backup.snapshot import Snapshot

@@ -84,7 +85,10 @@ class ResticBackupper(AbstractBackupper):
    def _password_command(self):
        return f"echo {LocalBackupSecret.get()}"

    def restic_command(self, *args, tag: str = "") -> List[str]:
    def restic_command(self, *args, tags: Optional[List[str]] = None) -> List[str]:
        if tags is None:
            tags = []

        command = [
            "restic",
            "-o",

@@ -94,13 +98,14 @@ class ResticBackupper(AbstractBackupper):
            "--password-command",
            self._password_command(),
        ]
        if tag != "":
            command.extend(
                [
                    "--tag",
                    tag,
                ]
            )
        if tags != []:
            for tag in tags:
                command.extend(
                    [
                        "--tag",
                        tag,
                    ]
                )
        if args:
            command.extend(ResticBackupper.__flatten_list(args))
        return command
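A short hedged illustration (tag values invented) of what the new tags handling contributes: each entry in `tags` becomes its own `--tag` pair on the restic command line, appended after the repository and password options built above.

# Illustration only: how a tags list expands into restic arguments.
tags = ["mailserver", "AUTO"]  # service name first, backup reason second
tag_args = []
for tag in tags:
    tag_args.extend(["--tag", tag])
assert tag_args == ["--tag", "mailserver", "--tag", "AUTO"]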

@@ -138,7 +143,12 @@ class ResticBackupper(AbstractBackupper):
        return result

    @unlocked_repo
    def start_backup(self, folders: List[str], tag: str) -> Snapshot:
    def start_backup(
        self,
        folders: List[str],
        service_name: str,
        reason: BackupReason = BackupReason.EXPLICIT,
    ) -> Snapshot:
        """
        Start backup with restic
        """

@@ -147,33 +157,35 @@ class ResticBackupper(AbstractBackupper):
        # of a string and an array of strings
        assert not isinstance(folders, str)

        tags = [service_name, reason.value]

        backup_command = self.restic_command(
            "backup",
            "--json",
            folders,
            tag=tag,
            tags=tags,
        )

        messages = []

        service = get_service_by_id(tag)
        service = get_service_by_id(service_name)
        if service is None:
            raise ValueError("No service with id ", tag)

            raise ValueError("No service with id ", service_name)
        job = get_backup_job(service)

        messages = []
        output = []
        try:
            for raw_message in output_yielder(backup_command):
                output.append(raw_message)
                message = self.parse_message(
                    raw_message,
                    job,
                )
                message = self.parse_message(raw_message, job)
                messages.append(message)
            return ResticBackupper._snapshot_from_backup_messages(
                messages,
                tag,
            id = ResticBackupper._snapshot_id_from_backup_messages(messages)
            return Snapshot(
                created_at=datetime.datetime.now(datetime.timezone.utc),
                id=id,
                service_name=service_name,
                reason=reason,
            )

        except ValueError as error:
            raise ValueError(
                "Could not create a snapshot: ",

@@ -184,13 +196,13 @@ class ResticBackupper(AbstractBackupper):
            ) from error

    @staticmethod
    def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot:
    def _snapshot_id_from_backup_messages(messages) -> str:
        for message in messages:
            if message["message_type"] == "summary":
                return ResticBackupper._snapshot_from_fresh_summary(
                    message,
                    repo_name,
                )
                # There is a discrepancy between versions of restic/rclone
                # Some report short_id in this field and some full
                return message["snapshot_id"][0:SHORT_ID_LEN]

        raise ValueError("no summary message in restic json output")

    def parse_message(self, raw_message_line: str, job=None) -> dict:

@@ -206,16 +218,6 @@ class ResticBackupper(AbstractBackupper):
        )
        return message

    @staticmethod
    def _snapshot_from_fresh_summary(message: dict, repo_name) -> Snapshot:
        return Snapshot(
            # There is a discrepancy between versions of restic/rclone
            # Some report short_id in this field and some full
            id=message["snapshot_id"][0:SHORT_ID_LEN],
            created_at=datetime.datetime.now(datetime.timezone.utc),
            service_name=repo_name,
        )

    def init(self) -> None:
        init_command = self.restic_command(
            "init",

@@ -391,6 +393,8 @@ class ResticBackupper(AbstractBackupper):
        forget_command = self.restic_command(
            "forget",
            snapshot_id,
            # TODO: prune should be done in a separate process
            "--prune",
        )

        with subprocess.Popen(

@@ -450,11 +454,19 @@ class ResticBackupper(AbstractBackupper):
    def get_snapshots(self) -> List[Snapshot]:
        """Get all snapshots from the repo"""
        snapshots = []

        for restic_snapshot in self._load_snapshots():
            # Compatibility with previous snaps:
            if len(restic_snapshot["tags"]) == 1:
                reason = BackupReason.EXPLICIT
            else:
                reason = restic_snapshot["tags"][1]

            snapshot = Snapshot(
                id=restic_snapshot["short_id"],
                created_at=restic_snapshot["time"],
                service_name=restic_snapshot["tags"][0],
                reason=reason,
            )

            snapshots.append(snapshot)
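A small hedged illustration (restic output invented) of the compatibility branch above: snapshots made before this change carry only the service tag, newer ones carry the backup reason as a second tag.

# Illustration only: mapping a restic snapshot's tag list to a reason string.
old_style = {"tags": ["mailserver"]}          # snapshot from before this PR
new_style = {"tags": ["mailserver", "AUTO"]}  # tagged by start_backup above

def reason_of(restic_snapshot):
    if len(restic_snapshot["tags"]) == 1:
        return "EXPLICIT"
    return restic_snapshot["tags"][1]

assert reason_of(old_style) == "EXPLICIT"
assert reason_of(new_style) == "AUTO"  # pydantic coerces this string into BackupReason.AUTO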

@@ -6,6 +6,10 @@ from datetime import datetime

from selfprivacy_api.models.backup.snapshot import Snapshot
from selfprivacy_api.models.backup.provider import BackupProviderModel
from selfprivacy_api.graphql.common_types.backup import (
    AutobackupQuotas,
    _AutobackupQuotas,
)

from selfprivacy_api.utils.redis_pool import RedisPool
from selfprivacy_api.utils.redis_model_storage import (

@@ -23,6 +27,8 @@ REDIS_INITTED_CACHE = "backups:repo_initted"
REDIS_PROVIDER_KEY = "backups:provider"
REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period"

REDIS_AUTOBACKUP_QUOTAS_KEY = "backups:autobackup_quotas_key"

redis = RedisPool().get_connection()

@@ -35,6 +41,7 @@ class Storage:
        redis.delete(REDIS_PROVIDER_KEY)
        redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY)
        redis.delete(REDIS_INITTED_CACHE)
        redis.delete(REDIS_AUTOBACKUP_QUOTAS_KEY)

        prefixes_to_clean = [
            REDIS_SNAPSHOTS_PREFIX,

@@ -170,3 +177,23 @@ class Storage:
    def mark_as_uninitted():
        """Marks the repository as initialized"""
        redis.delete(REDIS_INITTED_CACHE)

    @staticmethod
    def set_autobackup_quotas(quotas: AutobackupQuotas) -> None:
        store_model_as_hash(redis, REDIS_AUTOBACKUP_QUOTAS_KEY, quotas.to_pydantic())

    @staticmethod
    def autobackup_quotas() -> AutobackupQuotas:
        quotas_model = hash_as_model(
            redis, REDIS_AUTOBACKUP_QUOTAS_KEY, _AutobackupQuotas
        )
        if quotas_model is None:
            unlimited_quotas = AutobackupQuotas(
                last=-1,
                daily=-1,
                weekly=-1,
                monthly=-1,
                yearly=-1,
            )
            return unlimited_quotas
        return AutobackupQuotas.from_pydantic(quotas_model)  # pylint: disable=no-member
@@ -3,7 +3,7 @@ The tasks module contains the worker tasks that are used to back up and restore
"""
from datetime import datetime, timezone

from selfprivacy_api.graphql.common_types.backup import RestoreStrategy
from selfprivacy_api.graphql.common_types.backup import RestoreStrategy, BackupReason

from selfprivacy_api.models.backup.snapshot import Snapshot
from selfprivacy_api.utils.huey import huey

@@ -26,11 +26,13 @@ def validate_datetime(dt: datetime) -> bool:

# huey tasks need to return something
@huey.task()
def start_backup(service: Service) -> bool:
def start_backup(
    service: Service, reason: BackupReason = BackupReason.EXPLICIT
) -> bool:
    """
    The worker task that starts the backup process.
    """
    Backups.back_up(service)
    Backups.back_up(service, reason)
    return True


@@ -53,7 +55,7 @@ def automatic_backup():
    """
    time = datetime.utcnow().replace(tzinfo=timezone.utc)
    for service in Backups.services_to_back_up(time):
        start_backup(service)
        start_backup(service, BackupReason.AUTO)


@huey.periodic_task(crontab(hour=SNAPSHOT_CACHE_TTL_HOURS))
@@ -27,4 +27,4 @@ async def get_token_header(

def get_api_version() -> str:
    """Get API version"""
    return "2.3.1"
    return "2.4.0"
@@ -1,10 +1,36 @@
"""Backup"""
# pylint: disable=too-few-public-methods
import strawberry
from enum import Enum
import strawberry
from pydantic import BaseModel


@strawberry.enum
class RestoreStrategy(Enum):
    INPLACE = "INPLACE"
    DOWNLOAD_VERIFY_OVERWRITE = "DOWNLOAD_VERIFY_OVERWRITE"


@strawberry.enum
class BackupReason(Enum):
    EXPLICIT = "EXPLICIT"
    AUTO = "AUTO"
    PRE_RESTORE = "PRE_RESTORE"


class _AutobackupQuotas(BaseModel):
    last: int
    daily: int
    weekly: int
    monthly: int
    yearly: int


@strawberry.experimental.pydantic.type(model=_AutobackupQuotas, all_fields=True)
class AutobackupQuotas:
    pass


@strawberry.experimental.pydantic.input(model=_AutobackupQuotas, all_fields=True)
class AutobackupQuotasInput:
    pass
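For context, a hedged sketch (not from the diff) of how these wrappers are used elsewhere in the PR: the pydantic model carries the data, while the strawberry type and input expose the same fields over GraphQL and convert with from_pydantic/to_pydantic.

# Illustration only, relying on strawberry's documented pydantic integration.
quotas_model = _AutobackupQuotas(last=3, daily=2, weekly=4, monthly=13, yearly=14)

graphql_quotas = AutobackupQuotas.from_pydantic(quotas_model)  # GraphQL output type
assert graphql_quotas.daily == 2

round_tripped = graphql_quotas.to_pydantic()  # back to the pydantic model
assert round_tripped == quotas_model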

@@ -11,7 +11,10 @@ from selfprivacy_api.graphql.queries.backup import BackupConfiguration
from selfprivacy_api.graphql.queries.backup import Backup
from selfprivacy_api.graphql.queries.providers import BackupProvider
from selfprivacy_api.graphql.common_types.jobs import job_to_api_job
from selfprivacy_api.graphql.common_types.backup import RestoreStrategy
from selfprivacy_api.graphql.common_types.backup import (
    AutobackupQuotasInput,
    RestoreStrategy,
)

from selfprivacy_api.backup import Backups
from selfprivacy_api.services import get_service_by_id

@@ -90,6 +93,33 @@ class BackupMutations:
            configuration=Backup().configuration(),
        )

    @strawberry.mutation(permission_classes=[IsAuthenticated])
    def set_autobackup_quotas(
        self, quotas: AutobackupQuotasInput
    ) -> GenericBackupConfigReturn:
        """
        Set autobackup quotas.
        Values <=0 for any timeframe mean no limits for that timeframe.
        To disable autobackup use autobackup period setting, not this mutation.
        """

        try:
            Backups.set_autobackup_quotas(quotas)
            return GenericBackupConfigReturn(
                success=True,
                message="",
                code=200,
                configuration=Backup().configuration(),
            )

        except Exception as e:
            return GenericBackupConfigReturn(
                success=False,
                message=str(e),
                code=400,
                configuration=Backup().configuration(),
            )

    @strawberry.mutation(permission_classes=[IsAuthenticated])
    def start_backup(self, service_id: str) -> GenericJobMutationReturn:
        """Start backup"""
@@ -13,6 +13,7 @@ from selfprivacy_api.graphql.common_types.service import (
    SnapshotInfo,
    service_to_graphql_service,
)
from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas
from selfprivacy_api.services import get_service_by_id

@@ -26,6 +27,8 @@ class BackupConfiguration:
    is_initialized: bool
    # If none, autobackups are disabled
    autobackup_period: typing.Optional[int]
    # None is equal to all quotas being unlimited (-1). Optional for compatibility reasons.
    autobackup_quotas: AutobackupQuotas
    # Bucket name for Backblaze, path for some other providers
    location_name: typing.Optional[str]
    location_id: typing.Optional[str]

@@ -42,6 +45,7 @@ class Backup:
            autobackup_period=Backups.autobackup_period_minutes(),
            location_name=Backups.provider().location,
            location_id=Backups.provider().repo_id,
            autobackup_quotas=Backups.autobackup_quotas(),
        )

    @strawberry.field
@@ -1,8 +1,11 @@
import datetime
from pydantic import BaseModel

from selfprivacy_api.graphql.common_types.backup import BackupReason


class Snapshot(BaseModel):
    id: str
    service_name: str
    created_at: datetime.datetime
    reason: BackupReason
@@ -1,11 +1,14 @@
from datetime import datetime
from typing import Optional
from enum import Enum


def store_model_as_hash(redis, redis_key, model):
    for key, value in model.dict().items():
        if isinstance(value, datetime):
            value = value.isoformat()
        if isinstance(value, Enum):
            value = value.value
        redis.hset(redis_key, key, str(value))
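A hedged sketch of why the new Enum branch matters (the counterpart reader, hash_as_model, is not shown in this diff): without it, str() on an enum member would store the qualified member name rather than its value.

# Illustration only; mirrors the BackupReason enum added in this PR.
from enum import Enum

class BackupReason(Enum):
    EXPLICIT = "EXPLICIT"
    AUTO = "AUTO"

member = BackupReason.AUTO
assert str(member) == "BackupReason.AUTO"  # what str(value) alone would put into redis
assert member.value == "AUTO"              # what the new isinstance(value, Enum) branch stores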

setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages

setup(
    name="selfprivacy_api",
    version="2.3.1",
    version="2.4.0",
    packages=find_packages(),
    scripts=[
        "selfprivacy_api/app.py",
@@ -4,6 +4,10 @@ from tests.common import generate_backup_query

from selfprivacy_api.graphql.common_types.service import service_to_graphql_service
from selfprivacy_api.graphql.common_types.backup import (
    _AutobackupQuotas,
    AutobackupQuotas,
)
from selfprivacy_api.jobs import Jobs, JobStatus

API_RELOAD_SNAPSHOTS = """

@@ -38,6 +42,34 @@ mutation TestAutobackupPeriod($period: Int) {
}
"""


API_SET_AUTOBACKUP_QUOTAS_MUTATION = """
mutation TestAutobackupQuotas($input: AutobackupQuotasInput!) {
    backup {
        setAutobackupQuotas(quotas: $input) {
            success
            message
            code
            configuration {
                provider
                encryptionKey
                isInitialized
                autobackupPeriod
                locationName
                locationId
                autobackupQuotas {
                    last
                    daily
                    weekly
                    monthly
                    yearly
                }
            }
        }
    }
}
"""

API_REMOVE_REPOSITORY_MUTATION = """
mutation TestRemoveRepo {
    backup {

@@ -177,6 +209,17 @@ def api_set_period(authorized_client, period):
    return response


def api_set_quotas(authorized_client, quotas: _AutobackupQuotas):
    response = authorized_client.post(
        "/graphql",
        json={
            "query": API_SET_AUTOBACKUP_QUOTAS_MUTATION,
            "variables": {"input": quotas.dict()},
        },
    )
    return response


def api_remove(authorized_client):
    response = authorized_client.post(
        "/graphql",

@@ -323,6 +366,22 @@ def test_remove(authorized_client, generic_userdata):
    assert configuration["isInitialized"] is False


def test_autobackup_quotas_nonzero(authorized_client):
    quotas = _AutobackupQuotas(
        last=3,
        daily=2,
        weekly=4,
        monthly=13,
        yearly=14,
    )
    response = api_set_quotas(authorized_client, quotas)
    data = get_data(response)["backup"]["setAutobackupQuotas"]
    assert_ok(data)

    configuration = data["configuration"]
    assert configuration["autobackupQuotas"] == quotas


def test_autobackup_period_nonzero(authorized_client):
    new_period = 11
    response = api_set_period(authorized_client, new_period)
@@ -5,8 +5,12 @@ from os import makedirs
from os import remove
from os import listdir
from os import urandom
from datetime import datetime, timedelta, timezone
from datetime import datetime, timedelta, timezone, date, time
from subprocess import Popen
from copy import copy

import secrets

import tempfile

@@ -17,11 +21,13 @@ from selfprivacy_api.services.service import ServiceStatus
from selfprivacy_api.services import get_service_by_id
from selfprivacy_api.services.test_service import DummyService
from selfprivacy_api.graphql.queries.providers import BackupProvider
from selfprivacy_api.graphql.common_types.backup import RestoreStrategy
from selfprivacy_api.graphql.common_types.backup import RestoreStrategy, BackupReason
from selfprivacy_api.jobs import Jobs, JobStatus

from selfprivacy_api.models.backup.snapshot import Snapshot

from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas

from selfprivacy_api.backup import Backups, BACKUP_PROVIDER_ENVS
import selfprivacy_api.backup.providers as providers
from selfprivacy_api.backup.providers import AbstractBackupProvider
@@ -293,6 +299,367 @@ def test_backup_returns_snapshot(backups, dummy_service):
    assert Backups.get_snapshot_by_id(snapshot.id) is not None
    assert snapshot.service_name == name
    assert snapshot.created_at is not None
    assert snapshot.reason == BackupReason.EXPLICIT


def test_backup_reasons(backups, dummy_service):
    snap = Backups.back_up(dummy_service, BackupReason.AUTO)
    assert snap.reason == BackupReason.AUTO

    Backups.force_snapshot_cache_reload()
    snaps = Backups.get_snapshots(dummy_service)
    assert snaps[0].reason == BackupReason.AUTO


unlimited_quotas = AutobackupQuotas(
    last=-1,
    daily=-1,
    weekly=-1,
    monthly=-1,
    yearly=-1,
)

zero_quotas = AutobackupQuotas(
    last=0,
    daily=0,
    weekly=0,
    monthly=0,
    yearly=0,
)


def test_get_empty_quotas(backups):
    quotas = Backups.autobackup_quotas()
    assert quotas is not None
    assert quotas == unlimited_quotas


def test_set_quotas(backups):
    quotas = AutobackupQuotas(
        last=3,
        daily=2343,
        weekly=343,
        monthly=0,
        yearly=-34556,
    )
    Backups.set_autobackup_quotas(quotas)
    assert Backups.autobackup_quotas() == AutobackupQuotas(
        last=3,
        daily=2343,
        weekly=343,
        monthly=0,
        yearly=-1,
    )


def test_set_zero_quotas(backups):
    quotas = AutobackupQuotas(
        last=0,
        daily=0,
        weekly=0,
        monthly=0,
        yearly=0,
    )
    Backups.set_autobackup_quotas(quotas)
    assert Backups.autobackup_quotas() == zero_quotas


def test_set_unlimited_quotas(backups):
    quotas = AutobackupQuotas(
        last=-1,
        daily=-1,
        weekly=-1,
        monthly=-1,
        yearly=-1,
    )
    Backups.set_autobackup_quotas(quotas)
    assert Backups.autobackup_quotas() == unlimited_quotas


def test_set_zero_quotas_after_unlimited(backups):
    quotas = AutobackupQuotas(
        last=-1,
        daily=-1,
        weekly=-1,
        monthly=-1,
        yearly=-1,
    )
    Backups.set_autobackup_quotas(quotas)
    assert Backups.autobackup_quotas() == unlimited_quotas

    quotas = AutobackupQuotas(
        last=0,
        daily=0,
        weekly=0,
        monthly=0,
        yearly=0,
    )
    Backups.set_autobackup_quotas(quotas)
    assert Backups.autobackup_quotas() == zero_quotas


def dummy_snapshot(date: datetime):
    return Snapshot(
        id=str(hash(date)),
        service_name="someservice",
        created_at=date,
        reason=BackupReason.EXPLICIT,
    )


def test_autobackup_snapshots_pruning(backups):
    # Wednesday, fourth week
    now = datetime(year=2023, month=1, day=25, hour=10)

    snaps = [
        dummy_snapshot(now),
        dummy_snapshot(now - timedelta(minutes=5)),
        dummy_snapshot(now - timedelta(hours=2)),
        dummy_snapshot(now - timedelta(hours=5)),
        dummy_snapshot(now - timedelta(days=1)),
        dummy_snapshot(now - timedelta(days=1, hours=2)),
        dummy_snapshot(now - timedelta(days=1, hours=3)),
        dummy_snapshot(now - timedelta(days=2)),
        dummy_snapshot(now - timedelta(days=7)),
        dummy_snapshot(now - timedelta(days=12)),
        dummy_snapshot(now - timedelta(days=23)),
        dummy_snapshot(now - timedelta(days=28)),
        dummy_snapshot(now - timedelta(days=32)),
        dummy_snapshot(now - timedelta(days=47)),
        dummy_snapshot(now - timedelta(days=64)),
        dummy_snapshot(now - timedelta(days=84)),
        dummy_snapshot(now - timedelta(days=104)),
        dummy_snapshot(now - timedelta(days=365 * 2)),
    ]
    old_len = len(snaps)

    quotas = copy(unlimited_quotas)
    Backups.set_autobackup_quotas(quotas)
    assert Backups._prune_snaps_with_quotas(snaps) == snaps

    quotas = copy(zero_quotas)
    quotas.last = 2
    quotas.daily = 2
    Backups.set_autobackup_quotas(quotas)

    snaps_to_keep = Backups._prune_snaps_with_quotas(snaps)
    assert snaps_to_keep == [
        dummy_snapshot(now),
        dummy_snapshot(now - timedelta(minutes=5)),
        # dummy_snapshot(now - timedelta(hours=2)),
        # dummy_snapshot(now - timedelta(hours=5)),
        dummy_snapshot(now - timedelta(days=1)),
        # dummy_snapshot(now - timedelta(days=1, hours=2)),
        # dummy_snapshot(now - timedelta(days=1, hours=3)),
        # dummy_snapshot(now - timedelta(days=2)),
        # dummy_snapshot(now - timedelta(days=7)),
        # dummy_snapshot(now - timedelta(days=12)),
        # dummy_snapshot(now - timedelta(days=23)),
        # dummy_snapshot(now - timedelta(days=28)),
        # dummy_snapshot(now - timedelta(days=32)),
        # dummy_snapshot(now - timedelta(days=47)),
        # dummy_snapshot(now - timedelta(days=64)),
        # dummy_snapshot(now - timedelta(days=84)),
        # dummy_snapshot(now - timedelta(days=104)),
        # dummy_snapshot(now - timedelta(days=365 * 2)),
    ]

    # checking that this function does not mutate the argument
    assert snaps != snaps_to_keep
    assert len(snaps) == old_len

    quotas = copy(zero_quotas)
    quotas.weekly = 4
    Backups.set_autobackup_quotas(quotas)

    snaps_to_keep = Backups._prune_snaps_with_quotas(snaps)
    assert snaps_to_keep == [
        dummy_snapshot(now),
        # dummy_snapshot(now - timedelta(minutes=5)),
        # dummy_snapshot(now - timedelta(hours=2)),
        # dummy_snapshot(now - timedelta(hours=5)),
        # dummy_snapshot(now - timedelta(days=1)),
        # dummy_snapshot(now - timedelta(days=1, hours=2)),
        # dummy_snapshot(now - timedelta(days=1, hours=3)),
        # dummy_snapshot(now - timedelta(days=2)),
        dummy_snapshot(now - timedelta(days=7)),
        dummy_snapshot(now - timedelta(days=12)),
        dummy_snapshot(now - timedelta(days=23)),
        # dummy_snapshot(now - timedelta(days=28)),
        # dummy_snapshot(now - timedelta(days=32)),
        # dummy_snapshot(now - timedelta(days=47)),
        # dummy_snapshot(now - timedelta(days=64)),
        # dummy_snapshot(now - timedelta(days=84)),
        # dummy_snapshot(now - timedelta(days=104)),
        # dummy_snapshot(now - timedelta(days=365 * 2)),
    ]

    quotas = copy(zero_quotas)
    quotas.monthly = 7
    Backups.set_autobackup_quotas(quotas)

    snaps_to_keep = Backups._prune_snaps_with_quotas(snaps)
    assert snaps_to_keep == [
        dummy_snapshot(now),
        # dummy_snapshot(now - timedelta(minutes=5)),
        # dummy_snapshot(now - timedelta(hours=2)),
        # dummy_snapshot(now - timedelta(hours=5)),
        # dummy_snapshot(now - timedelta(days=1)),
        # dummy_snapshot(now - timedelta(days=1, hours=2)),
        # dummy_snapshot(now - timedelta(days=1, hours=3)),
        # dummy_snapshot(now - timedelta(days=2)),
        # dummy_snapshot(now - timedelta(days=7)),
        # dummy_snapshot(now - timedelta(days=12)),
        # dummy_snapshot(now - timedelta(days=23)),
        dummy_snapshot(now - timedelta(days=28)),
        # dummy_snapshot(now - timedelta(days=32)),
        # dummy_snapshot(now - timedelta(days=47)),
        dummy_snapshot(now - timedelta(days=64)),
        # dummy_snapshot(now - timedelta(days=84)),
        dummy_snapshot(now - timedelta(days=104)),
        dummy_snapshot(now - timedelta(days=365 * 2)),
    ]


def test_autobackup_snapshots_pruning_yearly(backups):
    snaps = [
        dummy_snapshot(datetime(year=2055, month=3, day=1)),
        dummy_snapshot(datetime(year=2055, month=2, day=1)),
        dummy_snapshot(datetime(year=2023, month=4, day=1)),
        dummy_snapshot(datetime(year=2023, month=3, day=1)),
        dummy_snapshot(datetime(year=2023, month=2, day=1)),
        dummy_snapshot(datetime(year=2021, month=2, day=1)),
    ]
    quotas = copy(zero_quotas)
    quotas.yearly = 2
    Backups.set_autobackup_quotas(quotas)

    snaps_to_keep = Backups._prune_snaps_with_quotas(snaps)
    assert snaps_to_keep == [
        dummy_snapshot(datetime(year=2055, month=3, day=1)),
        dummy_snapshot(datetime(year=2023, month=4, day=1)),
    ]


def test_autobackup_snapshots_pruning_bottleneck(backups):
    now = datetime(year=2023, month=1, day=25, hour=10)
    snaps = [
        dummy_snapshot(now),
        dummy_snapshot(now - timedelta(minutes=5)),
        dummy_snapshot(now - timedelta(hours=2)),
        dummy_snapshot(now - timedelta(hours=3)),
        dummy_snapshot(now - timedelta(hours=4)),
    ]

    yearly_quota = copy(zero_quotas)
    yearly_quota.yearly = 2

    monthly_quota = copy(zero_quotas)
    monthly_quota.monthly = 2

    weekly_quota = copy(zero_quotas)
    weekly_quota.weekly = 2

    daily_quota = copy(zero_quotas)
    daily_quota.daily = 2

    last_quota = copy(zero_quotas)
    last_quota.last = 1
    last_quota.yearly = 2

    for quota in [last_quota, yearly_quota, monthly_quota, weekly_quota, daily_quota]:
        print(quota)
        Backups.set_autobackup_quotas(quota)
        snaps_to_keep = Backups._prune_snaps_with_quotas(snaps)
        assert snaps_to_keep == [
            dummy_snapshot(now),
            # If there is a vacant quota, we should keep the last snapshot even if it doesn't fit
            dummy_snapshot(now - timedelta(hours=4)),
        ]


def test_autobackup_snapshots_pruning_edgeweek(backups):
    # jan 1 2023 is Sunday
    snaps = [
        dummy_snapshot(datetime(year=2023, month=1, day=6)),
        dummy_snapshot(datetime(year=2023, month=1, day=1)),
        dummy_snapshot(datetime(year=2022, month=12, day=31)),
        dummy_snapshot(datetime(year=2022, month=12, day=30)),
    ]
    quotas = copy(zero_quotas)
    quotas.weekly = 2
    Backups.set_autobackup_quotas(quotas)

    snaps_to_keep = Backups._prune_snaps_with_quotas(snaps)
    assert snaps_to_keep == [
        dummy_snapshot(datetime(year=2023, month=1, day=6)),
        dummy_snapshot(datetime(year=2023, month=1, day=1)),
    ]


def test_autobackup_snapshots_pruning_big_gap(backups):
    snaps = [
        dummy_snapshot(datetime(year=2023, month=1, day=6)),
        dummy_snapshot(datetime(year=2023, month=1, day=2)),
        dummy_snapshot(datetime(year=2022, month=10, day=31)),
        dummy_snapshot(datetime(year=2022, month=10, day=30)),
    ]
    quotas = copy(zero_quotas)
    quotas.weekly = 2
    Backups.set_autobackup_quotas(quotas)

    snaps_to_keep = Backups._prune_snaps_with_quotas(snaps)
    assert snaps_to_keep == [
        dummy_snapshot(datetime(year=2023, month=1, day=6)),
        dummy_snapshot(datetime(year=2022, month=10, day=31)),
    ]


def test_too_many_auto(backups, dummy_service):
    assert Backups.autobackup_quotas()
    quota = copy(zero_quotas)
    quota.last = 2
    Backups.set_autobackup_quotas(quota)
    assert Backups.autobackup_quotas().last == 2

    snap = Backups.back_up(dummy_service, BackupReason.AUTO)
    assert len(Backups.get_snapshots(dummy_service)) == 1
    snap2 = Backups.back_up(dummy_service, BackupReason.AUTO)
    assert len(Backups.get_snapshots(dummy_service)) == 2
    snap3 = Backups.back_up(dummy_service, BackupReason.AUTO)
    assert len(Backups.get_snapshots(dummy_service)) == 2

    snaps = Backups.get_snapshots(dummy_service)
    assert snap2 in snaps
    assert snap3 in snaps
    assert snap not in snaps

    quota.last = -1
    Backups.set_autobackup_quotas(quota)
    snap4 = Backups.back_up(dummy_service, BackupReason.AUTO)

    snaps = Backups.get_snapshots(dummy_service)
    assert len(snaps) == 3
    assert snap4 in snaps

    # Retroactivity
    quota.last = 1
    Backups.set_autobackup_quotas(quota)
    snaps = Backups.get_snapshots(dummy_service)
    assert len(snaps) == 1

    snap5 = Backups.back_up(dummy_service, BackupReason.AUTO)
    snaps = Backups.get_snapshots(dummy_service)
    assert len(snaps) == 1
    assert snap5 in snaps

    # Explicit snaps are not affected
    snap6 = Backups.back_up(dummy_service, BackupReason.EXPLICIT)

    snaps = Backups.get_snapshots(dummy_service)
    assert len(snaps) == 2
    assert snap5 in snaps
    assert snap6 in snaps


def folder_files(folder):
@@ -435,7 +802,10 @@ def test_forget_snapshot(backups, dummy_service):

def test_forget_nonexistent_snapshot(backups, dummy_service):
    bogus = Snapshot(
        id="gibberjibber", service_name="nohoho", created_at=datetime.now(timezone.utc)
        id="gibberjibber",
        service_name="nohoho",
        created_at=datetime.now(timezone.utc),
        reason=BackupReason.EXPLICIT,
    )
    with pytest.raises(ValueError):
        Backups.forget_snapshot(bogus)

@@ -508,6 +878,8 @@ def test_restore_snapshot_task(
    snaps = Backups.get_snapshots(dummy_service)
    if restore_strategy == RestoreStrategy.INPLACE:
        assert len(snaps) == 2
        reasons = [snap.reason for snap in snaps]
        assert BackupReason.PRE_RESTORE in reasons
    else:
        assert len(snaps) == 1