Mirror of https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api.git (synced 2025-01-11 18:39:30 +00:00)
Merge branch 'restic-rewrite' of https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api into restic-rewrite
Commit a303e5ce37
@@ -7,6 +7,9 @@ steps:
     commands:
       - kill $(ps aux | grep 'redis-server 127.0.0.1:6389' | awk '{print $2}') || true
       - redis-server --bind 127.0.0.1 --port 6389 >/dev/null &
+      # We do not care about persistance on CI
+      - sleep 10
+      - redis-cli -h 127.0.0.1 -p 6389 config set stop-writes-on-bgsave-error no
       - coverage run -m pytest -q
       - coverage xml
       - sonar-scanner -Dsonar.projectKey=SelfPrivacy-REST-API -Dsonar.sources=. -Dsonar.host.url=http://analyzer.lan:9000 -Dsonar.login="$SONARQUBE_TOKEN"

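The three added CI steps start a throwaway Redis on 127.0.0.1:6389 and tell it to keep accepting writes even when a background save fails, since persistence does not matter on CI. Below is a minimal sketch of how a test fixture could talk to that instance; the redis-py dependency and the function name are assumptions, not part of this commit.

import redis  # assumed dependency: the redis-py client


def ci_redis() -> redis.Redis:
    """Connect to the throwaway Redis the CI pipeline starts on port 6389."""
    connection = redis.Redis(host="127.0.0.1", port=6389, decode_responses=True)
    # Same effect as the `redis-cli ... config set` step in the pipeline above.
    connection.config_set("stop-writes-on-bgsave-error", "no")
    connection.ping()  # raises redis.exceptions.ConnectionError if the server is not up yet
    return connection
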
.gitignore (vendored): 1 line changed
@@ -147,3 +147,4 @@ cython_debug/
 # End of https://www.toptal.com/developers/gitignore/api/flask
 
 *.db
+*.rdb

api.nix: 64 lines removed (file deleted)
@@ -1,64 +0,0 @@
-{ lib, python39Packages }:
-with python39Packages;
-buildPythonApplication {
-  pname = "selfprivacy-api";
-  version = "2.0.0";
-
-  propagatedBuildInputs = [
-    setuptools
-    portalocker
-    pytz
-    pytest
-    pytest-mock
-    pytest-datadir
-    huey
-    gevent
-    mnemonic
-    pydantic
-    typing-extensions
-    psutil
-    fastapi
-    uvicorn
-    (buildPythonPackage rec {
-      pname = "strawberry-graphql";
-      version = "0.123.0";
-      format = "pyproject";
-      patches = [
-        ./strawberry-graphql.patch
-      ];
-      propagatedBuildInputs = [
-        typing-extensions
-        python-multipart
-        python-dateutil
-        # flask
-        pydantic
-        pygments
-        poetry
-        # flask-cors
-        (buildPythonPackage rec {
-          pname = "graphql-core";
-          version = "3.2.0";
-          format = "setuptools";
-          src = fetchPypi {
-            inherit pname version;
-            sha256 = "sha256-huKgvgCL/eGe94OI3opyWh2UKpGQykMcJKYIN5c4A84=";
-          };
-          checkInputs = [
-            pytest-asyncio
-            pytest-benchmark
-            pytestCheckHook
-          ];
-          pythonImportsCheck = [
-            "graphql"
-          ];
-        })
-      ];
-      src = fetchPypi {
-        inherit pname version;
-        sha256 = "KsmZ5Xv8tUg6yBxieAEtvoKoRG60VS+iVGV0X6oCExo=";
-      };
-    })
-  ];
-
-  src = ./.;
-}

@@ -1,2 +0,0 @@
-{ pkgs ? import <nixpkgs> {} }:
-pkgs.callPackage ./api.nix {}

@@ -1,12 +1,16 @@
 from datetime import datetime, timedelta
 from operator import add
-from os import statvfs, path, walk
+from os import statvfs
 from typing import List, Optional
 
 from selfprivacy_api.utils import ReadUserData, WriteUserData
 
 from selfprivacy_api.services import get_service_by_id
-from selfprivacy_api.services.service import Service, ServiceStatus, StoppedService
+from selfprivacy_api.services.service import (
+    Service,
+    ServiceStatus,
+    StoppedService,
+)
 
 from selfprivacy_api.jobs import Jobs, JobStatus, Job
 
@@ -41,16 +45,17 @@ class NotDeadError(AssertionError):
 
     def __str__(self):
         return f"""
-            Service {self.service_name} should be either stopped or dead from an error before we back up.
+            Service {self.service_name} should be either stopped or dead from
+            an error before we back up.
             Normally, this error is unreachable because we do try ensure this.
             Apparently, not this time.
             """
 
 
 class Backups:
     """A stateless controller class for backups"""
 
-    ### Providers
+    # Providers
 
     @staticmethod
     def provider():
@@ -172,7 +177,7 @@ class Backups:
 
             user_data["backup"] = DEFAULT_JSON_PROVIDER
 
-    ### Init
+    # Init
 
     @staticmethod
     def init_repo():
@@ -191,7 +196,7 @@ class Backups:
 
         return False
 
-    ### Backup
+    # Backup
 
     @staticmethod
     def back_up(service: Service):
@@ -221,7 +226,8 @@ class Backups:
         Jobs.update(job, status=JobStatus.FINISHED)
         return snapshot
 
-    ### Restoring
+    # Restoring
 
     @staticmethod
     def _ensure_queued_restore_job(service, snapshot) -> Job:
         job = get_restore_job(service)
@@ -237,12 +243,17 @@ class Backups:
 
         Jobs.update(job, status=JobStatus.RUNNING)
         try:
-            Backups._restore_service_from_snapshot(service, snapshot.id, verify=False)
+            Backups._restore_service_from_snapshot(
+                service,
+                snapshot.id,
+                verify=False,
+            )
         except Exception as e:
             Backups._restore_service_from_snapshot(
                 service, failsafe_snapshot.id, verify=False
             )
             raise e
+        # TODO: Do we really have to forget this snapshot? — Inex
         Backups.forget_snapshot(failsafe_snapshot)
 
     @staticmethod
@@ -295,8 +306,9 @@ class Backups:
         else:
             raise NotImplementedError(
                 """
-                We do not know if there is enough space for restoration because there is some novel restore strategy used!
-                This is a developer's fault, open a issue please
+                We do not know if there is enough space for restoration because
+                there is some novel restore strategy used!
+                This is a developer's fault, open an issue please
                 """
             )
         available_space = Backups.space_usable_for_service(service)
@@ -307,15 +319,20 @@ class Backups:
             )
 
     @staticmethod
-    def _restore_service_from_snapshot(service: Service, snapshot_id: str, verify=True):
+    def _restore_service_from_snapshot(
+        service: Service,
+        snapshot_id: str,
+        verify=True,
+    ):
         folders = service.get_folders()
 
         Backups.provider().backupper.restore_from_backup(
             snapshot_id,
             folders,
+            verify=verify,
         )
 
-    ### Snapshots
+    # Snapshots
 
     @staticmethod
     def get_snapshots(service: Service) -> List[Snapshot]:
@@ -377,7 +394,7 @@ class Backups:
         # expiring cache entry
         Storage.cache_snapshot(snapshot)
 
-    ### Autobackup
+    # Autobackup
 
     @staticmethod
     def is_autobackup_enabled(service: Service) -> bool:
@@ -472,7 +489,7 @@ class Backups:
             )
         ]
 
-    ### Helpers
+    # Helpers
 
     @staticmethod
     def space_usable_for_service(service: Service) -> int:
@@ -500,6 +517,9 @@ class Backups:
     def assert_dead(service: Service):
         # if we backup the service that is failing to restore it to the
         # previous snapshot, its status can be FAILED
-        # And obviously restoring a failed service is the moun route
-        if service.get_status() not in [ServiceStatus.INACTIVE, ServiceStatus.FAILED]:
+        # And obviously restoring a failed service is the main route
+        if service.get_status() not in [
+            ServiceStatus.INACTIVE,
+            ServiceStatus.FAILED,
+        ]:
             raise NotDeadError(service)

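For orientation, a hedged sketch of how the pieces visible in this diff (StoppedService, Backups.assert_dead, Backups.back_up) would plausibly fit together; the wrapper function and the exact call site are assumptions, not code from this commit.

from selfprivacy_api.backup import Backups
from selfprivacy_api.services.service import Service, StoppedService


def safe_back_up(service: Service):
    """Hypothetical helper: stop the service, confirm it is dead, then back it up."""
    with StoppedService(service):      # assumed to stop the unit on enter and restart it on exit
        Backups.assert_dead(service)   # raises NotDeadError if the service is still running
        return Backups.back_up(service)
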
@@ -30,7 +30,12 @@ class AbstractBackupper(ABC):
         raise NotImplementedError
 
     @abstractmethod
-    def restore_from_backup(self, snapshot_id: str, folders: List[str], verify=True):
+    def restore_from_backup(
+        self,
+        snapshot_id: str,
+        folders: List[str],
+        verify=True,
+    ):
         """Restore a target folder using a snapshot"""
         raise NotImplementedError
 
@@ -81,7 +81,7 @@ class ResticBackupper(AbstractBackupper):
         mount_command.insert(0, "nohup")
         handle = subprocess.Popen(mount_command, stdout=subprocess.DEVNULL, shell=False)
         sleep(2)
-        if not "ids" in listdir(dir):
+        if "ids" not in listdir(dir):
             raise IOError("failed to mount dir ", dir)
         return handle
 
@@ -211,7 +211,12 @@ class ResticBackupper(AbstractBackupper):
         except ValueError as e:
             raise ValueError("cannot restore a snapshot: " + output) from e
 
-    def restore_from_backup(self, snapshot_id, folders: List[str], verify=True):
+    def restore_from_backup(
+        self,
+        snapshot_id,
+        folders: List[str],
+        verify=True,
+    ):
         """
         Restore from backup with restic
         """
@@ -236,6 +241,9 @@ class ResticBackupper(AbstractBackupper):
                 dst = folder
                 sync(src, dst)
 
+        if not verify:
+            self.unmount_repo(dir)
+
     def do_restore(self, snapshot_id, target="/", verify=False):
         """barebones restic restore"""
         restore_command = self.restic_command(
@@ -27,4 +27,4 @@ async def get_token_header(
 
 def get_api_version() -> str:
     """Get API version"""
-    return "2.1.2"
+    return "2.1.3"

@@ -22,6 +22,9 @@ from selfprivacy_api.migrations.providers import CreateProviderFields
 from selfprivacy_api.migrations.prepare_for_nixos_2211 import (
     MigrateToSelfprivacyChannelFrom2205,
 )
+from selfprivacy_api.migrations.prepare_for_nixos_2305 import (
+    MigrateToSelfprivacyChannelFrom2211,
+)
 
 migrations = [
     FixNixosConfigBranch(),
@@ -31,6 +34,7 @@ migrations = [
     CheckForFailedBindsMigration(),
     CreateProviderFields(),
     MigrateToSelfprivacyChannelFrom2205(),
+    MigrateToSelfprivacyChannelFrom2211(),
 ]
 
 
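The migrations list acts as a plain registry: each entry implements is_migration_needed() and migrate() (see the new file below). A minimal sketch of the run loop such a registry implies; the runner name and its error handling are assumptions and not shown in this diff.

def run_migrations(migrations) -> None:
    """Apply every registered migration whose precondition holds (sketch, not the project's code)."""
    for migration in migrations:
        try:
            if migration.is_migration_needed():
                print(f"Running migration: {migration.get_migration_name()}")
                migration.migrate()
        except Exception as error:
            # A failing migration should not block the remaining ones.
            print(f"Migration error: {error}")
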
selfprivacy_api/migrations/prepare_for_nixos_2305.py (new file): 58 lines added
@@ -0,0 +1,58 @@
+import os
+import subprocess
+
+from selfprivacy_api.migrations.migration import Migration
+
+
+class MigrateToSelfprivacyChannelFrom2211(Migration):
+    """Migrate to selfprivacy Nix channel.
+    For some reason NixOS 22.11 servers initialized with the nixos channel instead of selfprivacy.
+    This stops us from upgrading to NixOS 23.05
+    """
+
+    def get_migration_name(self):
+        return "migrate_to_selfprivacy_channel_from_2211"
+
+    def get_migration_description(self):
+        return "Migrate to selfprivacy Nix channel from NixOS 22.11."
+
+    def is_migration_needed(self):
+        try:
+            output = subprocess.check_output(
+                ["nix-channel", "--list"], start_new_session=True
+            )
+            output = output.decode("utf-8")
+            first_line = output.split("\n", maxsplit=1)[0]
+            return first_line.startswith("nixos") and (
+                first_line.endswith("nixos-22.11")
+            )
+        except subprocess.CalledProcessError:
+            return False
+
+    def migrate(self):
+        # Change the channel and update them.
+        # Also, go to /etc/nixos directory and make a git pull
+        current_working_directory = os.getcwd()
+        try:
+            print("Changing channel")
+            os.chdir("/etc/nixos")
+            subprocess.check_output(
+                [
+                    "nix-channel",
+                    "--add",
+                    "https://channel.selfprivacy.org/nixos-selfpricacy",
+                    "nixos",
+                ]
+            )
+            subprocess.check_output(["nix-channel", "--update"])
+            nixos_config_branch = subprocess.check_output(
+                ["git", "rev-parse", "--abbrev-ref", "HEAD"], start_new_session=True
+            )
+            if nixos_config_branch.decode("utf-8").strip() == "api-redis":
+                print("Also changing nixos-config branch from api-redis to master")
+                subprocess.check_output(["git", "checkout", "master"])
+                subprocess.check_output(["git", "pull"])
+            os.chdir(current_working_directory)
+        except subprocess.CalledProcessError:
+            os.chdir(current_working_directory)
+            print("Error")
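A hedged sketch of how is_migration_needed() above could be exercised in a test by faking the nix-channel --list output; the mocking approach and the test name are assumptions, not part of this commit.

from unittest import mock

from selfprivacy_api.migrations.prepare_for_nixos_2305 import (
    MigrateToSelfprivacyChannelFrom2211,
)


def test_migration_needed_on_plain_nixos_2211_channel():
    # First line starts with "nixos" and ends with "nixos-22.11", so the check should fire.
    fake_output = b"nixos https://nixos.org/channels/nixos-22.11\n"
    with mock.patch("subprocess.check_output", return_value=fake_output):
        assert MigrateToSelfprivacyChannelFrom2211().is_migration_needed()

Note that migrate() restores the original working directory on both the success and failure paths, which keeps the chdir("/etc/nixos") from leaking into the rest of the API process.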