Merge pull request 'fix(backups): do not infinitely retry automatic backup if it errors out' (#69) from autobackup-errors into master
Reviewed-on: https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api/pulls/69
Reviewed-by: Inex Code <inex.code@selfprivacy.org>
This commit is contained in: commit 113bcf4c29
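
The fix itself lives in Backups.is_time_to_backup_service: a recently errored autobackup job now suppresses further automatic attempts instead of retrying on every scheduler pass. A minimal, self-contained sketch of that rule follows; the function name and free-standing signature are illustrative, and the value of AUTOBACKUP_JOB_EXPIRATION_SECONDS is assumed, not taken from the diff.

# Illustration only: mirrors the decision flow of Backups.is_time_to_backup_service
# after this PR, written as a standalone function so it runs outside the project.
from datetime import datetime, timedelta, timezone
from typing import Optional

AUTOBACKUP_JOB_EXPIRATION_SECONDS = 60 * 60  # assumed value for illustration


def is_time_to_backup(
    now: datetime,
    period_minutes: Optional[int],
    last_backup: Optional[datetime],
    last_error: Optional[datetime],
) -> bool:
    if period_minutes is None:
        return False
    # New behaviour: a recent failed autobackup suppresses retries until the
    # failed job expires, instead of retrying on every scheduler pass.
    if last_error is not None:
        if now < last_error + timedelta(seconds=AUTOBACKUP_JOB_EXPIRATION_SECONDS):
            return False
    if last_backup is None:
        return True  # no previous backups: queue one immediately
    return now > last_backup + timedelta(minutes=period_minutes)


now = datetime.now(timezone.utc)
assert is_time_to_backup(now, 13, None, last_error=now - timedelta(minutes=5)) is False
assert is_time_to_backup(now, 13, None, last_error=None) is True
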
@@ -7,6 +7,7 @@ from typing import Optional
 from pydantic import BaseModel
 from mnemonic import Mnemonic
 
+from selfprivacy_api.utils.timeutils import ensure_tz_aware, ensure_tz_aware_strict
 from selfprivacy_api.repositories.tokens.redis_tokens_repository import (
     RedisTokensRepository,
 )
@@ -94,16 +95,22 @@ class RecoveryTokenStatus(BaseModel):
 
 
 def get_api_recovery_token_status() -> RecoveryTokenStatus:
-    """Get the recovery token status"""
+    """Get the recovery token status, timezone-aware"""
     token = TOKEN_REPO.get_recovery_key()
     if token is None:
         return RecoveryTokenStatus(exists=False, valid=False)
     is_valid = TOKEN_REPO.is_recovery_key_valid()
+
+    # New tokens are tz-aware, but older ones might not be
+    expiry_date = token.expires_at
+    if expiry_date is not None:
+        expiry_date = ensure_tz_aware_strict(expiry_date)
+
     return RecoveryTokenStatus(
         exists=True,
         valid=is_valid,
-        date=_naive(token.created_at),
-        expiration=_naive(token.expires_at),
+        date=ensure_tz_aware_strict(token.created_at),
+        expiration=expiry_date,
         uses_left=token.uses_left,
     )
 
@@ -121,8 +128,9 @@ def get_new_api_recovery_key(
 ) -> str:
     """Get new recovery key"""
     if expiration_date is not None:
-        current_time = datetime.now().timestamp()
-        if expiration_date.timestamp() < current_time:
+        expiration_date = ensure_tz_aware(expiration_date)
+        current_time = datetime.now(timezone.utc)
+        if expiration_date < current_time:
             raise InvalidExpirationDate("Expiration date is in the past")
     if uses_left is not None:
         if uses_left <= 0:
@@ -1,7 +1,8 @@
 """
 This module contains the controller class for backups.
 """
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
+import time
 import os
 from os import statvfs
 from typing import Callable, List, Optional
@@ -37,6 +38,7 @@ from selfprivacy_api.backup.providers import get_provider
 from selfprivacy_api.backup.storage import Storage
 from selfprivacy_api.backup.jobs import (
     get_backup_job,
+    get_backup_fail,
     add_backup_job,
     get_restore_job,
     add_restore_job,
@@ -292,9 +294,9 @@ class Backups:
     def back_up(
         service: Service, reason: BackupReason = BackupReason.EXPLICIT
     ) -> Snapshot:
-        """The top-level function to back up a service"""
-        folders = service.get_folders()
-        service_name = service.get_id()
+        """The top-level function to back up a service
+        If it fails for any reason at all, it should both mark job as
+        errored and re-raise an error"""
 
         job = get_backup_job(service)
         if job is None:
@@ -302,6 +304,10 @@ class Backups:
         Jobs.update(job, status=JobStatus.RUNNING)
 
         try:
+            if service.can_be_backed_up() is False:
+                raise ValueError("cannot backup a non-backuppable service")
+            folders = service.get_folders()
+            service_name = service.get_id()
             service.pre_backup()
             snapshot = Backups.provider().backupper.start_backup(
                 folders,
@@ -692,23 +698,45 @@ class Backups:
         """Get a timezone-aware time of the last backup of a service"""
         return Storage.get_last_backup_time(service.get_id())
 
+    @staticmethod
+    def get_last_backup_error_time(service: Service) -> Optional[datetime]:
+        """Get a timezone-aware time of the last backup of a service"""
+        job = get_backup_fail(service)
+        if job is not None:
+            datetime_created = job.created_at
+            if datetime_created.tzinfo is None:
+                # assume it is in localtime
+                offset = timedelta(seconds=time.localtime().tm_gmtoff)
+                datetime_created = datetime_created - offset
+                return datetime.combine(
+                    datetime_created.date(), datetime_created.time(), timezone.utc
+                )
+            return datetime_created
+        return None
+
     @staticmethod
     def is_time_to_backup_service(service: Service, time: datetime):
         """Returns True if it is time to back up a service"""
         period = Backups.autobackup_period_minutes()
-        service_id = service.get_id()
         if not service.can_be_backed_up():
             return False
         if period is None:
             return False
 
-        last_backup = Storage.get_last_backup_time(service_id)
+        last_error = Backups.get_last_backup_error_time(service)
+
+        if last_error is not None:
+            if time < last_error + timedelta(seconds=AUTOBACKUP_JOB_EXPIRATION_SECONDS):
+                return False
+
+        last_backup = Backups.get_last_backed_up(service)
         if last_backup is None:
             # queue a backup immediately if there are no previous backups
             return True
 
         if time > last_backup + timedelta(minutes=period):
             return True
 
         return False
 
     # Helpers
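
The get_last_backup_error_time hunk above normalises a job's naive localtime timestamp into an aware UTC datetime before any comparison. A standalone sketch of that conversion step, using the same calls as the hunk; the sample timestamp is invented for illustration:

# Standalone sketch of the naive-localtime -> aware-UTC step used above.
# Job timestamps are naive localtime, per the jobs module docstring changed
# later in this diff.
import time
from datetime import datetime, timedelta, timezone

naive_local = datetime(2023, 11, 10, 15, 30)            # e.g. job.created_at
offset = timedelta(seconds=time.localtime().tm_gmtoff)  # local offset from UTC
shifted = naive_local - offset
as_utc = datetime.combine(shifted.date(), shifted.time(), timezone.utc)
print(as_utc.isoformat())  # aware UTC timestamp, safe to compare with tz-aware datetimes
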
@@ -80,9 +80,19 @@ def get_job_by_type(type_id: str) -> Optional[Job]:
             return job
 
 
+def get_failed_job_by_type(type_id: str) -> Optional[Job]:
+    for job in Jobs.get_jobs():
+        if job.type_id == type_id and job.status == JobStatus.ERROR:
+            return job
+
+
 def get_backup_job(service: Service) -> Optional[Job]:
     return get_job_by_type(backup_job_type(service))
 
 
+def get_backup_fail(service: Service) -> Optional[Job]:
+    return get_failed_job_by_type(backup_job_type(service))
+
+
 def get_restore_job(service: Service) -> Optional[Job]:
     return get_job_by_type(restore_job_type(service))
@@ -38,7 +38,7 @@ class ApiRecoveryKeyStatus:
 
 
 def get_recovery_key_status() -> ApiRecoveryKeyStatus:
-    """Get recovery key status"""
+    """Get recovery key status, times are timezone-aware"""
     status = get_api_recovery_token_status()
     if status is None or not status.exists:
         return ApiRecoveryKeyStatus(
@@ -8,8 +8,8 @@ A job is a dictionary with the following keys:
 - name: name of the job
 - description: description of the job
 - status: status of the job
-- created_at: date of creation of the job
-- updated_at: date of last update of the job
+- created_at: date of creation of the job, naive localtime
+- updated_at: date of last update of the job, naive localtime
 - finished_at: date of finish of the job
 - error: error message if the job failed
 - result: result of the job
@@ -22,7 +22,7 @@ class NewDeviceKey(BaseModel):
 
     def is_valid(self) -> bool:
         """
-        Check if the recovery key is valid.
+        Check if key is valid.
         """
         if is_past(self.expires_at):
             return False
@@ -30,7 +30,7 @@ class NewDeviceKey(BaseModel):
 
     def as_mnemonic(self) -> str:
         """
-        Get the recovery key as a mnemonic.
+        Get the key as a mnemonic.
         """
         return Mnemonic(language="english").to_mnemonic(bytes.fromhex(self.key))
 
@@ -47,6 +47,7 @@ class RecoveryKey(BaseModel):
     ) -> "RecoveryKey":
         """
         Factory to generate a random token.
+        If passed naive time as expiration, assumes utc
         """
         creation_date = datetime.now(timezone.utc)
         if expiration is not None:
selfprivacy_api/utils/timeutils.py (new file, 52 lines)
@@ -0,0 +1,52 @@
+from datetime import datetime, timezone
+
+
+def ensure_tz_aware(dt: datetime) -> datetime:
+    """
+    returns timezone-aware datetime
+    assumes utc on naive datetime input
+    """
+    if dt.tzinfo is None:
+        # astimezone() is dangerous, it makes an implicit assumption that
+        # the time is localtime
+        dt = dt.replace(tzinfo=timezone.utc)
+    return dt
+
+
+def ensure_tz_aware_strict(dt: datetime) -> datetime:
+    """
+    returns timezone-aware datetime
+    raises error if input is a naive datetime
+    """
+    if dt.tzinfo is None:
+        raise ValueError(
+            "no timezone in datetime (tz-aware datetime is required for this operation)",
+            dt,
+        )
+    return dt
+
+
+def tzaware_parse_time(iso_timestamp: str) -> datetime:
+    """
+    parse an iso8601 timestamp into timezone-aware datetime
+    assume utc if no timezone in stamp
+    example of timestamp:
+    2023-11-10T12:07:47.868788+00:00
+
+    """
+    dt = datetime.fromisoformat(iso_timestamp)
+    dt = ensure_tz_aware(dt)
+    return dt
+
+
+def tzaware_parse_time_strict(iso_timestamp: str) -> datetime:
+    """
+    parse an iso8601 timestamp into timezone-aware datetime
+    raise an error if no timezone in stamp
+    example of timestamp:
+    2023-11-10T12:07:47.868788+00:00
+
+    """
+    dt = datetime.fromisoformat(iso_timestamp)
+    dt = ensure_tz_aware_strict(dt)
+    return dt
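
A quick behaviour check for the helpers added above; ensure_tz_aware is copied verbatim from the new file so this snippet runs without the selfprivacy_api package installed, and the sample values are invented:

# Behaviour check for the new timeutils helpers (ensure_tz_aware copied verbatim).
from datetime import datetime, timezone

def ensure_tz_aware(dt: datetime) -> datetime:
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)  # naive input is assumed to be UTC
    return dt

naive = datetime(2023, 11, 10, 12, 7, 47)
assert ensure_tz_aware(naive) == datetime(2023, 11, 10, 12, 7, 47, tzinfo=timezone.utc)

# tzaware_parse_time accepts ISO 8601 stamps like the example in its docstring:
stamp = "2023-11-10T12:07:47.868788+00:00"
parsed = ensure_tz_aware(datetime.fromisoformat(stamp))  # same two steps as tzaware_parse_time
assert parsed.tzinfo is not None
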
@@ -11,6 +11,10 @@ def five_minutes_into_future_naive():
     return datetime.now() + timedelta(minutes=5)
 
 
+def five_minutes_into_future_naive_utc():
+    return datetime.utcnow() + timedelta(minutes=5)
+
+
 def five_minutes_into_future():
     return datetime.now(timezone.utc) + timedelta(minutes=5)
@@ -19,6 +23,10 @@ def five_minutes_into_past_naive():
     return datetime.now() - timedelta(minutes=5)
 
 
+def five_minutes_into_past_naive_utc():
+    return datetime.utcnow() - timedelta(minutes=5)
+
+
 def five_minutes_into_past():
     return datetime.now(timezone.utc) - timedelta(minutes=5)
@@ -28,6 +36,10 @@ class NearFuture(datetime):
     def now(cls, tz=None):
         return datetime.now(tz) + timedelta(minutes=13)
 
+    @classmethod
+    def utcnow(cls):
+        return datetime.utcnow() + timedelta(minutes=13)
+
 
 def read_json(file_path):
     with open(file_path, "r", encoding="utf-8") as file:
@@ -59,8 +71,7 @@ def mnemonic_to_hex(mnemonic):
     return Mnemonic(language="english").to_entropy(mnemonic).hex()
 
 
-def assert_recovery_recent(time_generated):
-    assert (
-        datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - timedelta(seconds=5)
-        < datetime.now()
+def assert_recovery_recent(time_generated: str):
+    assert datetime.fromisoformat(time_generated) - timedelta(seconds=5) < datetime.now(
+        timezone.utc
     )
@@ -6,16 +6,16 @@ ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"]
 
 def assert_ok(response, request):
     data = assert_data(response)
-    data[request]["success"] is True
-    data[request]["message"] is not None
-    data[request]["code"] == 200
+    assert data[request]["success"] is True
+    assert data[request]["message"] is not None
+    assert data[request]["code"] == 200
 
 
 def assert_errorcode(response, request, code):
     data = assert_data(response)
-    data[request]["success"] is False
-    data[request]["message"] is not None
-    data[request]["code"] == code
+    assert data[request]["success"] is False
+    assert data[request]["message"] is not None
+    assert data[request]["code"] == code
 
 
 def assert_empty(response):
@@ -2,6 +2,10 @@
 # pylint: disable=unused-argument
 # pylint: disable=missing-function-docstring
 
+import pytest
+
+from datetime import datetime, timezone
+
 from tests.common import (
     generate_api_query,
     assert_recovery_recent,
@@ -10,8 +14,9 @@ from tests.common import (
 )
 
 # Graphql API's output should be timezone-naive
-from tests.common import five_minutes_into_future_naive as five_minutes_into_future
-from tests.common import five_minutes_into_past_naive as five_minutes_into_past
+from tests.common import five_minutes_into_future_naive_utc as five_minutes_into_future
+from tests.common import five_minutes_into_future as five_minutes_into_future_tz
+from tests.common import five_minutes_into_past_naive_utc as five_minutes_into_past
 
 from tests.test_graphql.api_common import (
     assert_empty,
@@ -158,17 +163,24 @@ def test_graphql_generate_recovery_key(client, authorized_client, tokens_file):
     graphql_use_recovery_key(client, key, "new_test_token2")
 
 
+@pytest.mark.parametrize(
+    "expiration_date", [five_minutes_into_future(), five_minutes_into_future_tz()]
+)
 def test_graphql_generate_recovery_key_with_expiration_date(
-    client, authorized_client, tokens_file
+    client, authorized_client, tokens_file, expiration_date: datetime
 ):
-    expiration_date = five_minutes_into_future()
     key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date)
 
     status = graphql_recovery_status(authorized_client)
     assert status["exists"] is True
     assert status["valid"] is True
     assert_recovery_recent(status["creationDate"])
-    assert status["expirationDate"] == expiration_date.isoformat()
+
+    # timezone-aware comparison. Should pass regardless of server's tz
+    assert datetime.fromisoformat(status["expirationDate"]) == expiration_date.replace(
+        tzinfo=timezone.utc
+    )
+
     assert status["usesLeft"] is None
 
     graphql_use_recovery_key(client, key, "new_test_token")
@@ -194,7 +206,11 @@ def test_graphql_use_recovery_key_after_expiration(
     assert status["exists"] is True
     assert status["valid"] is False
     assert_recovery_recent(status["creationDate"])
-    assert status["expirationDate"] == expiration_date.isoformat()
+
+    # timezone-aware comparison. Should pass regardless of server's tz
+    assert datetime.fromisoformat(status["expirationDate"]) == expiration_date.replace(
+        tzinfo=timezone.utc
+    )
     assert status["usesLeft"] is None
 
 
@@ -14,6 +14,8 @@ import secrets
 
 import tempfile
 
+from selfprivacy_api.utils.huey import huey
+
 import selfprivacy_api.services as services
 from selfprivacy_api.services import Service, get_all_services
 from selfprivacy_api.services.service import ServiceStatus
@@ -119,6 +121,10 @@ def dummy_service(tmpdir, backups, raw_dummy_service) -> Service:
     # register our service
     services.services.append(service)
 
+    # make sure we are in immediate mode because this thing is non pickleable to store on queue.
+    huey.immediate = True
+    assert huey.immediate is True
+
     assert get_service_by_id(service.get_id()) is not None
     yield service
 
@@ -996,6 +1002,32 @@ def test_autobackup_timing(backups, dummy_service):
     assert Backups.is_time_to_backup_service(dummy_service, future)
 
 
+def test_backup_unbackuppable(backups, dummy_service):
+    dummy_service.set_backuppable(False)
+    assert dummy_service.can_be_backed_up() is False
+    with pytest.raises(ValueError):
+        Backups.back_up(dummy_service)
+
+
+def test_failed_autoback_prevents_more_autobackup(backups, dummy_service):
+    backup_period = 13  # minutes
+    now = datetime.now(timezone.utc)
+
+    Backups.set_autobackup_period_minutes(backup_period)
+    assert Backups.is_time_to_backup_service(dummy_service, now)
+
+    # artificially making an errored out backup job
+    dummy_service.set_backuppable(False)
+    with pytest.raises(ValueError):
+        Backups.back_up(dummy_service)
+    dummy_service.set_backuppable(True)
+
+    assert Backups.get_last_backed_up(dummy_service) is None
+    assert Backups.get_last_backup_error_time(dummy_service) is not None
+
+    assert Backups.is_time_to_backup_service(dummy_service, now) is False
+
+
 # Storage
 def test_snapshots_caching(backups, dummy_service):
     Backups.back_up(dummy_service)
@@ -1,18 +1,25 @@
 import pytest
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 
 from selfprivacy_api.models.tokens.recovery_key import RecoveryKey
 from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey
 
 
-def test_recovery_key_expired():
-    expiration = datetime.now() - timedelta(minutes=5)
+def test_recovery_key_expired_utcnaive():
+    expiration = datetime.utcnow() - timedelta(minutes=5)
     key = RecoveryKey.generate(expiration=expiration, uses_left=2)
     assert not key.is_valid()
 
 
+def test_recovery_key_expired_tzaware():
+    expiration = datetime.now(timezone.utc) - timedelta(minutes=5)
+    key = RecoveryKey.generate(expiration=expiration, uses_left=2)
+    assert not key.is_valid()
+
+
 def test_new_device_key_expired():
-    expiration = datetime.now() - timedelta(minutes=5)
+    # key is supposed to be tzaware
+    expiration = datetime.now(timezone.utc) - timedelta(minutes=5)
     key = NewDeviceKey.generate()
     key.expires_at = expiration
     assert not key.is_valid()
@@ -2,6 +2,7 @@
 # pylint: disable=unused-argument
 # pylint: disable=missing-function-docstring
 import datetime
+from datetime import timezone
 import pytest
 
 from tests.conftest import TOKENS_FILE_CONTENTS
@@ -11,8 +12,8 @@ from tests.common import (
     NearFuture,
     assert_recovery_recent,
 )
-from tests.common import five_minutes_into_future_naive as five_minutes_into_future
-from tests.common import five_minutes_into_past_naive as five_minutes_into_past
+from tests.common import five_minutes_into_future_naive_utc as five_minutes_into_future
+from tests.common import five_minutes_into_past_naive_utc as five_minutes_into_past
 
 DATE_FORMATS = [
     "%Y-%m-%dT%H:%M:%S.%fZ",
@@ -337,7 +338,7 @@ def test_generate_recovery_token_with_expiration_date(
         "exists": True,
         "valid": True,
         "date": time_generated,
-        "expiration": expiration_date.isoformat(),
+        "expiration": expiration_date.replace(tzinfo=timezone.utc).isoformat(),
         "uses_left": None,
     }
 