Migrate to FastAPI (#13)

Co-authored-by: inexcode <inex.code@selfprivacy.org>
Reviewed-on: https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api/pulls/13
Inex Code 2022-08-25 20:03:56 +03:00
parent 206589d5ad
commit 7935de0fe1
167 changed files with 6088 additions and 3695 deletions

.gitignore vendored (2 lines changed)

@ -145,3 +145,5 @@ dmypy.json
cython_debug/
# End of https://www.toptal.com/developers/gitignore/api/flask
*.db


@ -1,2 +1,3 @@
[MASTER]
init-hook="from pylint.config import find_pylintrc; import os, sys; sys.path.append(os.path.dirname(find_pylintrc()))"
extension-pkg-whitelist=pydantic

.vscode/launch.json vendored Normal file (19 lines changed)

@ -0,0 +1,19 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: FastAPI",
"type": "python",
"request": "launch",
"module": "uvicorn",
"args": [
"selfprivacy_api.app:app"
],
"jinja": true,
"justMyCode": false
}
]
}

api.nix Normal file (64 lines changed)

@ -0,0 +1,64 @@
{ lib, python39Packages }:
with python39Packages;
buildPythonApplication {
pname = "selfprivacy-api";
version = "2.0.0";
propagatedBuildInputs = [
setuptools
portalocker
pytz
pytest
pytest-mock
pytest-datadir
huey
gevent
mnemonic
pydantic
typing-extensions
psutil
fastapi
uvicorn
(buildPythonPackage rec {
pname = "strawberry-graphql";
version = "0.123.0";
format = "pyproject";
patches = [
./strawberry-graphql.patch
];
propagatedBuildInputs = [
typing-extensions
python-multipart
python-dateutil
# flask
pydantic
pygments
poetry
# flask-cors
(buildPythonPackage rec {
pname = "graphql-core";
version = "3.2.0";
format = "setuptools";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-huKgvgCL/eGe94OI3opyWh2UKpGQykMcJKYIN5c4A84=";
};
checkInputs = [
pytest-asyncio
pytest-benchmark
pytestCheckHook
];
pythonImportsCheck = [
"graphql"
];
})
];
src = fetchPypi {
inherit pname version;
sha256 = "KsmZ5Xv8tUg6yBxieAEtvoKoRG60VS+iVGV0X6oCExo=";
};
})
];
src = ./.;
}

default.nix Normal file (2 lines changed)

@ -0,0 +1,2 @@
{ pkgs ? import <nixpkgs> {} }:
pkgs.callPackage ./api.nix {}


@ -1,3 +1,3 @@
[build-system]
requires = ["setuptools", "wheel", "portalocker", "flask-swagger", "flask-swagger-ui"]
build-backend = "setuptools.build_meta"
requires = ["setuptools", "wheel", "portalocker"]
build-backend = "setuptools.build_meta"


@ -1,17 +0,0 @@
wheel
flask
flask_restful
flask_socketio
setuptools
portalocker
flask-swagger
flask-swagger-ui
pytz
huey
gevent
mnemonic
pytest
coverage
pytest-mock
pytest-datadir


@ -0,0 +1,116 @@
"""App tokens actions"""
from datetime import datetime
from typing import Optional
from pydantic import BaseModel
from selfprivacy_api.utils.auth import (
delete_token,
generate_recovery_token,
get_recovery_token_status,
get_tokens_info,
is_recovery_token_exists,
is_recovery_token_valid,
is_token_name_exists,
is_token_name_pair_valid,
refresh_token,
get_token_name,
)
class TokenInfoWithIsCaller(BaseModel):
"""Token info"""
name: str
date: datetime
is_caller: bool
def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCaller]:
"""Get the tokens info"""
caller_name = get_token_name(caller_token)
tokens = get_tokens_info()
return [
TokenInfoWithIsCaller(
name=token.name,
date=token.date,
is_caller=token.name == caller_name,
)
for token in tokens
]
class NotFoundException(Exception):
"""Not found exception"""
class CannotDeleteCallerException(Exception):
"""Cannot delete caller exception"""
def delete_api_token(caller_token: str, token_name: str) -> None:
"""Delete the token"""
if is_token_name_pair_valid(token_name, caller_token):
raise CannotDeleteCallerException("Cannot delete caller's token")
if not is_token_name_exists(token_name):
raise NotFoundException("Token not found")
delete_token(token_name)
def refresh_api_token(caller_token: str) -> str:
"""Refresh the token"""
new_token = refresh_token(caller_token)
if new_token is None:
raise NotFoundException("Token not found")
return new_token
class RecoveryTokenStatus(BaseModel):
"""Recovery token status"""
exists: bool
valid: bool
date: Optional[datetime] = None
expiration: Optional[datetime] = None
uses_left: Optional[int] = None
def get_api_recovery_token_status() -> RecoveryTokenStatus:
"""Get the recovery token status"""
if not is_recovery_token_exists():
return RecoveryTokenStatus(exists=False, valid=False)
status = get_recovery_token_status()
if status is None:
return RecoveryTokenStatus(exists=False, valid=False)
is_valid = is_recovery_token_valid()
return RecoveryTokenStatus(
exists=True,
valid=is_valid,
date=status["date"],
expiration=status["expiration"],
uses_left=status["uses_left"],
)
class InvalidExpirationDate(Exception):
"""Invalid expiration date exception"""
class InvalidUsesLeft(Exception):
"""Invalid uses left exception"""
def get_new_api_recovery_key(
expiration_date: Optional[datetime] = None, uses_left: Optional[int] = None
) -> str:
"""Get new recovery key"""
if expiration_date is not None:
current_time = datetime.now().timestamp()
if expiration_date.timestamp() < current_time:
raise InvalidExpirationDate("Expiration date is in the past")
if uses_left is not None:
if uses_left <= 0:
raise InvalidUsesLeft("Uses must be greater than 0")
key = generate_recovery_token(expiration_date, uses_left)
return key
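The new actions layer raises exceptions instead of returning (success, message, code) tuples, so each transport (REST or GraphQL) translates them itself. A minimal caller sketch, assuming a hypothetical helper and a 7-day, 3-use key; not taken from the diff:

from datetime import datetime, timedelta

from selfprivacy_api.actions.api_tokens import (
    InvalidExpirationDate,
    InvalidUsesLeft,
    get_new_api_recovery_key,
)


def issue_recovery_key() -> str:
    # Hypothetical helper: request a recovery key valid for 7 days and 3 uses.
    try:
        return get_new_api_recovery_key(
            expiration_date=datetime.now() + timedelta(days=7),
            uses_left=3,
        )
    except (InvalidExpirationDate, InvalidUsesLeft) as error:
        # The GraphQL mutation in this commit maps these to code=400 returns.
        raise ValueError(str(error)) from error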


@ -0,0 +1,149 @@
"""Actions to manage the SSH."""
from typing import Optional
from pydantic import BaseModel
from selfprivacy_api.actions.users import (
UserNotFound,
ensure_ssh_and_users_fields_exist,
)
from selfprivacy_api.utils import WriteUserData, ReadUserData, validate_ssh_public_key
def enable_ssh():
with WriteUserData() as data:
if "ssh" not in data:
data["ssh"] = {}
data["ssh"]["enable"] = True
class UserdataSshSettings(BaseModel):
"""Settings for the SSH."""
enable: bool = True
passwordAuthentication: bool = True
rootKeys: list[str] = []
def get_ssh_settings() -> UserdataSshSettings:
with ReadUserData() as data:
if "ssh" not in data:
return UserdataSshSettings()
if "enable" not in data["ssh"]:
data["ssh"]["enable"] = True
if "passwordAuthentication" not in data["ssh"]:
data["ssh"]["passwordAuthentication"] = True
if "rootKeys" not in data["ssh"]:
data["ssh"]["rootKeys"] = []
return UserdataSshSettings(**data["ssh"])
def set_ssh_settings(
enable: Optional[bool] = None, password_authentication: Optional[bool] = None
) -> None:
with WriteUserData() as data:
if "ssh" not in data:
data["ssh"] = {}
if enable is not None:
data["ssh"]["enable"] = enable
if password_authentication is not None:
data["ssh"]["passwordAuthentication"] = password_authentication
def add_root_ssh_key(public_key: str):
with WriteUserData() as data:
if "ssh" not in data:
data["ssh"] = {}
if "rootKeys" not in data["ssh"]:
data["ssh"]["rootKeys"] = []
# Return 409 if key already in array
for key in data["ssh"]["rootKeys"]:
if key == public_key:
raise KeyAlreadyExists()
data["ssh"]["rootKeys"].append(public_key)
class KeyAlreadyExists(Exception):
"""Key already exists"""
pass
class InvalidPublicKey(Exception):
"""Invalid public key"""
pass
def create_ssh_key(username: str, ssh_key: str):
"""Create a new ssh key"""
if not validate_ssh_public_key(ssh_key):
raise InvalidPublicKey()
with WriteUserData() as data:
ensure_ssh_and_users_fields_exist(data)
if username == data["username"]:
if ssh_key in data["sshKeys"]:
raise KeyAlreadyExists()
data["sshKeys"].append(ssh_key)
return
if username == "root":
if ssh_key in data["ssh"]["rootKeys"]:
raise KeyAlreadyExists()
data["ssh"]["rootKeys"].append(ssh_key)
return
for user in data["users"]:
if user["username"] == username:
if "sshKeys" not in user:
user["sshKeys"] = []
if ssh_key in user["sshKeys"]:
raise KeyAlreadyExists()
user["sshKeys"].append(ssh_key)
return
raise UserNotFound()
class KeyNotFound(Exception):
"""Key not found"""
pass
def remove_ssh_key(username: str, ssh_key: str):
"""Delete a ssh key"""
with WriteUserData() as data:
ensure_ssh_and_users_fields_exist(data)
if username == "root":
if ssh_key in data["ssh"]["rootKeys"]:
data["ssh"]["rootKeys"].remove(ssh_key)
return
raise KeyNotFound()
if username == data["username"]:
if ssh_key in data["sshKeys"]:
data["sshKeys"].remove(ssh_key)
return
raise KeyNotFound()
for user in data["users"]:
if user["username"] == username:
if "sshKeys" not in user:
user["sshKeys"] = []
if ssh_key in user["sshKeys"]:
user["sshKeys"].remove(ssh_key)
return
raise KeyNotFound()
raise UserNotFound()
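Since the rest/ routers themselves are not shown in this excerpt, here is a hedged sketch of how a FastAPI route could map these exceptions onto HTTP status codes; the router prefix and path are assumptions:

from fastapi import APIRouter, HTTPException

from selfprivacy_api.actions.ssh import (
    InvalidPublicKey,
    KeyAlreadyExists,
    create_ssh_key,
)
from selfprivacy_api.actions.users import UserNotFound

router = APIRouter(prefix="/users")  # illustrative prefix


@router.post("/{username}/ssh_keys")  # illustrative path
async def add_user_ssh_key(username: str, public_key: str):
    try:
        create_ssh_key(username, public_key)
    except InvalidPublicKey:
        raise HTTPException(status_code=400, detail="Invalid key type")
    except KeyAlreadyExists:
        raise HTTPException(status_code=409, detail="Key already exists")
    except UserNotFound:
        raise HTTPException(status_code=404, detail="User not found")
    return {"message": "New SSH key successfully written"}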


@ -0,0 +1,139 @@
"""Actions to manage the system."""
import os
import subprocess
import pytz
from typing import Optional
from pydantic import BaseModel
from selfprivacy_api.utils import WriteUserData, ReadUserData
def get_timezone() -> str:
"""Get the timezone of the server"""
with ReadUserData() as user_data:
if "timezone" in user_data:
return user_data["timezone"]
return "Europe/Uzhgorod"
class InvalidTimezone(Exception):
"""Invalid timezone"""
pass
def change_timezone(timezone: str) -> None:
"""Change the timezone of the server"""
if timezone not in pytz.all_timezones:
raise InvalidTimezone(f"Invalid timezone: {timezone}")
with WriteUserData() as user_data:
user_data["timezone"] = timezone
class UserDataAutoUpgradeSettings(BaseModel):
"""Settings for auto-upgrading user data"""
enable: bool = True
allowReboot: bool = False
def get_auto_upgrade_settings() -> UserDataAutoUpgradeSettings:
"""Get the auto-upgrade settings"""
with ReadUserData() as user_data:
if "autoUpgrade" in user_data:
return UserDataAutoUpgradeSettings(**user_data["autoUpgrade"])
return UserDataAutoUpgradeSettings()
def set_auto_upgrade_settings(
enable: Optional[bool] = None, allowReboot: Optional[bool] = None
) -> None:
"""Set the auto-upgrade settings"""
with WriteUserData() as user_data:
if "autoUpgrade" not in user_data:
user_data["autoUpgrade"] = {}
if enable is not None:
user_data["autoUpgrade"]["enable"] = enable
if allowReboot is not None:
user_data["autoUpgrade"]["allowReboot"] = allowReboot
def rebuild_system() -> int:
"""Rebuild the system"""
rebuild_result = subprocess.Popen(
["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True
)
rebuild_result.communicate()[0]
return rebuild_result.returncode
def rollback_system() -> int:
"""Rollback the system"""
rollback_result = subprocess.Popen(
["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True
)
rollback_result.communicate()[0]
return rollback_result.returncode
def upgrade_system() -> int:
"""Upgrade the system"""
upgrade_result = subprocess.Popen(
["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True
)
upgrade_result.communicate()[0]
return upgrade_result.returncode
def reboot_system() -> None:
"""Reboot the system"""
subprocess.Popen(["reboot"], start_new_session=True)
def get_system_version() -> str:
"""Get system version"""
return subprocess.check_output(["uname", "-a"]).decode("utf-8").strip()
def get_python_version() -> str:
"""Get Python version"""
return subprocess.check_output(["python", "-V"]).decode("utf-8").strip()
class SystemActionResult(BaseModel):
"""System action result"""
status: int
message: str
data: str
def pull_repository_changes() -> SystemActionResult:
"""Pull repository changes"""
git_pull_command = ["git", "pull"]
current_working_directory = os.getcwd()
os.chdir("/etc/nixos")
git_pull_process_descriptor = subprocess.Popen(
git_pull_command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=False,
)
data = git_pull_process_descriptor.communicate()[0].decode("utf-8")
os.chdir(current_working_directory)
if git_pull_process_descriptor.returncode == 0:
return SystemActionResult(
status=0,
message="Pulled repository changes",
data=data,
)
return SystemActionResult(
status=git_pull_process_descriptor.returncode,
message="Failed to pull repository changes",
data=data,
)
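For orientation, a minimal sketch (assumed, not part of the commit) of how a caller consumes SystemActionResult together with the systemctl wrappers:

from selfprivacy_api.actions.system import (
    pull_repository_changes,
    rebuild_system,
)


def pull_and_rebuild() -> int:
    # Made-up helper: pull /etc/nixos and, if that worked, start a rebuild.
    result = pull_repository_changes()
    if result.status != 0:
        print(result.data)  # combined stdout/stderr of `git pull`
        return result.status
    return rebuild_system()  # return code of starting sp-nixos-rebuild.service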


@ -0,0 +1,219 @@
"""Actions to manage the users."""
import re
from typing import Optional
from pydantic import BaseModel
from enum import Enum
from selfprivacy_api.utils import (
ReadUserData,
WriteUserData,
hash_password,
is_username_forbidden,
)
class UserDataUserOrigin(Enum):
"""Origin of the user in the user data"""
NORMAL = "NORMAL"
PRIMARY = "PRIMARY"
ROOT = "ROOT"
class UserDataUser(BaseModel):
"""The user model from the userdata file"""
username: str
ssh_keys: list[str]
origin: UserDataUserOrigin
def ensure_ssh_and_users_fields_exist(data):
if "ssh" not in data:
data["ssh"] = {}
data["ssh"]["rootKeys"] = []
elif data["ssh"].get("rootKeys") is None:
data["ssh"]["rootKeys"] = []
if "sshKeys" not in data:
data["sshKeys"] = []
if "users" not in data:
data["users"] = []
def get_users(
exclude_primary: bool = False,
exclude_root: bool = False,
) -> list[UserDataUser]:
"""Get the list of users"""
users = []
with ReadUserData() as user_data:
ensure_ssh_and_users_fields_exist(user_data)
users = [
UserDataUser(
username=user["username"],
ssh_keys=user.get("sshKeys", []),
origin=UserDataUserOrigin.NORMAL,
)
for user in user_data["users"]
]
if not exclude_primary:
users.append(
UserDataUser(
username=user_data["username"],
ssh_keys=user_data["sshKeys"],
origin=UserDataUserOrigin.PRIMARY,
)
)
if not exclude_root:
users.append(
UserDataUser(
username="root",
ssh_keys=user_data["ssh"]["rootKeys"],
origin=UserDataUserOrigin.ROOT,
)
)
return users
class UsernameForbidden(Exception):
"""Attempted to create a user with a forbidden username"""
pass
class UserAlreadyExists(Exception):
"""Attempted to create a user that already exists"""
pass
class UsernameNotAlphanumeric(Exception):
"""Attempted to create a user with a non-alphanumeric username"""
pass
class UsernameTooLong(Exception):
"""Attempted to create a user with a username that is too long. Username must be less than 32 characters"""
pass
class PasswordIsEmpty(Exception):
"""Attempted to create a user with an empty password"""
pass
def create_user(username: str, password: str):
if password == "":
raise PasswordIsEmpty("Password is empty")
if is_username_forbidden(username):
raise UsernameForbidden("Username is forbidden")
if not re.match(r"^[a-z_][a-z0-9_]+$", username):
raise UsernameNotAlphanumeric(
"Username must be alphanumeric and start with a letter"
)
if len(username) >= 32:
raise UsernameTooLong("Username must be less than 32 characters")
with ReadUserData() as user_data:
ensure_ssh_and_users_fields_exist(user_data)
if username == user_data["username"]:
raise UserAlreadyExists("User already exists")
if username in [user["username"] for user in user_data["users"]]:
raise UserAlreadyExists("User already exists")
hashed_password = hash_password(password)
with WriteUserData() as user_data:
ensure_ssh_and_users_fields_exist(user_data)
user_data["users"].append(
{"username": username, "sshKeys": [], "hashedPassword": hashed_password}
)
class UserNotFound(Exception):
"""Attempted to get a user that does not exist"""
pass
class UserIsProtected(Exception):
"""Attempted to delete a user that is protected"""
pass
def delete_user(username: str):
with WriteUserData() as user_data:
ensure_ssh_and_users_fields_exist(user_data)
if username == user_data["username"] or username == "root":
raise UserIsProtected("Cannot delete main or root user")
for data_user in user_data["users"]:
if data_user["username"] == username:
user_data["users"].remove(data_user)
break
else:
raise UserNotFound("User did not exist")
def update_user(username: str, password: str):
if password == "":
raise PasswordIsEmpty("Password is empty")
hashed_password = hash_password(password)
with WriteUserData() as data:
ensure_ssh_and_users_fields_exist(data)
if username == data["username"]:
data["hashedMasterPassword"] = hashed_password
# Return 404 if user does not exist
else:
for data_user in data["users"]:
if data_user["username"] == username:
data_user["hashedPassword"] = hashed_password
break
else:
raise UserNotFound("User does not exist")
def get_user_by_username(username: str) -> Optional[UserDataUser]:
with ReadUserData() as data:
ensure_ssh_and_users_fields_exist(data)
if username == "root":
return UserDataUser(
origin=UserDataUserOrigin.ROOT,
username="root",
ssh_keys=data["ssh"]["rootKeys"],
)
if username == data["username"]:
return UserDataUser(
origin=UserDataUserOrigin.PRIMARY,
username=username,
ssh_keys=data["sshKeys"],
)
for user in data["users"]:
if user["username"] == username:
if "sshKeys" not in user:
user["sshKeys"] = []
return UserDataUser(
origin=UserDataUserOrigin.NORMAL,
username=username,
ssh_keys=user["sshKeys"],
)
return None


@ -1,110 +1,56 @@
#!/usr/bin/env python3
"""SelfPrivacy server management API"""
import os
from gevent import monkey
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from strawberry.fastapi import GraphQLRouter
import uvicorn
from flask import Flask, request, jsonify
from flask_restful import Api
from flask_swagger import swagger
from flask_swagger_ui import get_swaggerui_blueprint
from flask_cors import CORS
from strawberry.flask.views import AsyncGraphQLView
from selfprivacy_api.resources.users import User, Users
from selfprivacy_api.resources.common import ApiVersion
from selfprivacy_api.resources.system import api_system
from selfprivacy_api.resources.services import services as api_services
from selfprivacy_api.resources.api_auth import auth as api_auth
from selfprivacy_api.restic_controller.tasks import huey, init_restic
from selfprivacy_api.migrations import run_migrations
from selfprivacy_api.utils.auth import is_token_valid
from selfprivacy_api.dependencies import get_api_version
from selfprivacy_api.graphql.schema import schema
from selfprivacy_api.migrations import run_migrations
from selfprivacy_api.restic_controller.tasks import init_restic
swagger_blueprint = get_swaggerui_blueprint(
"/api/docs", "/api/swagger.json", config={"app_name": "SelfPrivacy API"}
from selfprivacy_api.rest import (
system,
users,
api_auth,
services,
)
app = FastAPI()
graphql_app = GraphQLRouter(
schema,
)
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
def create_app(test_config=None):
"""Initiate Flask app and bind routes"""
app = Flask(__name__)
api = Api(app)
CORS(app)
app.include_router(system.router)
app.include_router(users.router)
app.include_router(api_auth.router)
app.include_router(services.router)
app.include_router(graphql_app, prefix="/graphql")
if test_config is None:
app.config["ENABLE_SWAGGER"] = os.environ.get("ENABLE_SWAGGER", "0")
app.config["B2_BUCKET"] = os.environ.get("B2_BUCKET")
else:
app.config.update(test_config)
# Check bearer token
@app.before_request
def check_auth():
# Exclude swagger-ui, /auth/new_device/authorize, /auth/recovery_token/use
if request.path.startswith("/api"):
pass
elif request.path.startswith("/auth/new_device/authorize"):
pass
elif request.path.startswith("/auth/recovery_token/use"):
pass
elif request.path.startswith("/graphql"):
pass
else:
auth = request.headers.get("Authorization")
if auth is None:
return jsonify({"error": "Missing Authorization header"}), 401
# Strip Bearer from auth header
auth = auth.replace("Bearer ", "")
if not is_token_valid(auth):
return jsonify({"error": "Invalid token"}), 401
@app.get("/api/version")
async def get_version():
"""Get the version of the server"""
return {"version": get_api_version()}
api.add_resource(ApiVersion, "/api/version")
api.add_resource(Users, "/users")
api.add_resource(User, "/users/<string:username>")
app.register_blueprint(api_system)
app.register_blueprint(api_services)
app.register_blueprint(api_auth)
@app.route("/api/swagger.json")
def spec():
if app.config["ENABLE_SWAGGER"] == "1":
swag = swagger(app)
swag["info"]["version"] = "1.2.7"
swag["info"]["title"] = "SelfPrivacy API"
swag["info"]["description"] = "SelfPrivacy API"
swag["securityDefinitions"] = {
"bearerAuth": {
"type": "apiKey",
"name": "Authorization",
"in": "header",
}
}
swag["security"] = [{"bearerAuth": []}]
return jsonify(swag)
return jsonify({}), 404
app.add_url_rule(
"/graphql", view_func=AsyncGraphQLView.as_view("graphql", schema=schema)
)
if app.config["ENABLE_SWAGGER"] == "1":
app.register_blueprint(swagger_blueprint, url_prefix="/api/docs")
return app
@app.on_event("startup")
async def startup():
run_migrations()
init_restic()
if __name__ == "__main__":
monkey.patch_all()
created_app = create_app()
run_migrations()
huey.start()
init_restic()
created_app.run(port=5050, debug=False)
uvicorn.run("selfprivacy_api.app:app", host="0.0.0.0", port=5050, log_level="info")
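Because the hunk above interleaves the removed Flask code with the added FastAPI code without +/- markers, here is an approximate reconstruction of the resulting app.py, assembled only from the added lines (a sketch, not the verbatim file):

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from strawberry.fastapi import GraphQLRouter
import uvicorn

from selfprivacy_api.dependencies import get_api_version
from selfprivacy_api.graphql.schema import schema
from selfprivacy_api.migrations import run_migrations
from selfprivacy_api.restic_controller.tasks import init_restic
from selfprivacy_api.rest import system, users, api_auth, services

app = FastAPI()
graphql_app = GraphQLRouter(schema)

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(system.router)
app.include_router(users.router)
app.include_router(api_auth.router)
app.include_router(services.router)
app.include_router(graphql_app, prefix="/graphql")


@app.get("/api/version")
async def get_version():
    """Get the version of the server"""
    return {"version": get_api_version()}


@app.on_event("startup")
async def startup():
    run_migrations()
    init_restic()


if __name__ == "__main__":
    uvicorn.run("selfprivacy_api.app:app", host="0.0.0.0", port=5050, log_level="info")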


@ -0,0 +1,30 @@
from fastapi import Depends, HTTPException, status
from fastapi.security import APIKeyHeader
from pydantic import BaseModel
from selfprivacy_api.utils.auth import is_token_valid
class TokenHeader(BaseModel):
token: str
async def get_token_header(
token: str = Depends(APIKeyHeader(name="Authorization", auto_error=False))
) -> TokenHeader:
if token is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="Token not provided"
)
else:
token = token.replace("Bearer ", "")
if not is_token_valid(token):
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token"
)
return TokenHeader(token=token)
def get_api_version() -> str:
"""Get API version"""
return "2.0.0"
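A hedged sketch of how the new REST routers are expected to attach this dependency (the rest/ modules are not included in this excerpt; the prefix and endpoint below are illustrative):

from fastapi import APIRouter, Depends

from selfprivacy_api.dependencies import TokenHeader, get_token_header

# Router-level dependency: every route requires a valid bearer token.
router = APIRouter(prefix="/services", dependencies=[Depends(get_token_header)])


@router.get("/status")
async def service_status(token: TokenHeader = Depends(get_token_header)):
    # token.token holds the caller's bearer token (FastAPI caches the
    # dependency per request, so validation runs only once).
    return {"ok": True}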


@ -3,7 +3,6 @@
import typing
from strawberry.permission import BasePermission
from strawberry.types import Info
from flask import request
from selfprivacy_api.utils.auth import is_token_valid
@ -14,11 +13,9 @@ class IsAuthenticated(BasePermission):
message = "You must be authenticated to access this resource."
def has_permission(self, source: typing.Any, info: Info, **kwargs) -> bool:
auth = request.headers.get("Authorization")
if auth is None:
token = info.context["request"].headers.get("Authorization")
if token is None:
token = info.context["request"].query_params.get("token")
if token is None:
return False
# Strip Bearer from auth header
auth = auth.replace("Bearer ", "")
if not is_token_valid(auth):
return False
return True
return is_token_valid(token.replace("Bearer ", ""))


@ -0,0 +1,13 @@
import typing
import strawberry
@strawberry.type
class DnsRecord:
"""DNS record"""
record_type: str
name: str
content: str
ttl: int
priority: typing.Optional[int]


@ -0,0 +1,49 @@
"""Jobs status"""
# pylint: disable=too-few-public-methods
import datetime
import typing
import strawberry
from selfprivacy_api.jobs import Job, Jobs
@strawberry.type
class ApiJob:
"""Job type for GraphQL."""
uid: str
name: str
description: str
status: str
status_text: typing.Optional[str]
progress: typing.Optional[int]
created_at: datetime.datetime
updated_at: datetime.datetime
finished_at: typing.Optional[datetime.datetime]
error: typing.Optional[str]
result: typing.Optional[str]
def job_to_api_job(job: Job) -> ApiJob:
"""Convert a Job from jobs controller to a GraphQL ApiJob."""
return ApiJob(
uid=str(job.uid),
name=job.name,
description=job.description,
status=job.status.name,
status_text=job.status_text,
progress=job.progress,
created_at=job.created_at,
updated_at=job.updated_at,
finished_at=job.finished_at,
error=job.error,
result=job.result,
)
def get_api_job_by_id(job_id: str) -> typing.Optional[ApiJob]:
"""Get a job for GraphQL by its ID."""
job = Jobs.get_instance().get_job(job_id)
if job is None:
return None
return job_to_api_job(job)


@ -0,0 +1,146 @@
from enum import Enum
import typing
import strawberry
from selfprivacy_api.graphql.common_types.dns import DnsRecord
from selfprivacy_api.services import get_service_by_id, get_services_by_location
from selfprivacy_api.services import Service as ServiceInterface
from selfprivacy_api.utils.block_devices import BlockDevices
def get_usages(root: "StorageVolume") -> list["StorageUsageInterface"]:
"""Get usages of a volume"""
return [
ServiceStorageUsage(
service=service_to_graphql_service(service),
title=service.get_display_name(),
used_space=str(service.get_storage_usage()),
volume=get_volume_by_id(service.get_location()),
)
for service in get_services_by_location(root.name)
]
@strawberry.type
class StorageVolume:
"""Stats and basic info about a volume or a system disk."""
total_space: str
free_space: str
used_space: str
root: bool
name: str
model: typing.Optional[str]
serial: typing.Optional[str]
type: str
@strawberry.field
def usages(self) -> list["StorageUsageInterface"]:
"""Get usages of a volume"""
return get_usages(self)
@strawberry.interface
class StorageUsageInterface:
used_space: str
volume: typing.Optional[StorageVolume]
title: str
@strawberry.type
class ServiceStorageUsage(StorageUsageInterface):
"""Storage usage for a service"""
service: typing.Optional["Service"]
@strawberry.enum
class ServiceStatusEnum(Enum):
ACTIVE = "ACTIVE"
RELOADING = "RELOADING"
INACTIVE = "INACTIVE"
FAILED = "FAILED"
ACTIVATING = "ACTIVATING"
DEACTIVATING = "DEACTIVATING"
OFF = "OFF"
def get_storage_usage(root: "Service") -> ServiceStorageUsage:
"""Get storage usage for a service"""
service = get_service_by_id(root.id)
if service is None:
return ServiceStorageUsage(
service=service,
title="Not found",
used_space="0",
volume=get_volume_by_id("sda1"),
)
return ServiceStorageUsage(
service=service_to_graphql_service(service),
title=service.get_display_name(),
used_space=str(service.get_storage_usage()),
volume=get_volume_by_id(service.get_location()),
)
@strawberry.type
class Service:
id: str
display_name: str
description: str
svg_icon: str
is_movable: bool
is_required: bool
is_enabled: bool
status: ServiceStatusEnum
url: typing.Optional[str]
dns_records: typing.Optional[typing.List[DnsRecord]]
@strawberry.field
def storage_usage(self) -> ServiceStorageUsage:
"""Get storage usage for a service"""
return get_storage_usage(self)
def service_to_graphql_service(service: ServiceInterface) -> Service:
"""Convert service to graphql service"""
return Service(
id=service.get_id(),
display_name=service.get_display_name(),
description=service.get_description(),
svg_icon=service.get_svg_icon(),
is_movable=service.is_movable(),
is_required=service.is_required(),
is_enabled=service.is_enabled(),
status=ServiceStatusEnum(service.get_status().value),
url=service.get_url(),
dns_records=[
DnsRecord(
record_type=record.type,
name=record.name,
content=record.content,
ttl=record.ttl,
priority=record.priority,
)
for record in service.get_dns_records()
],
)
def get_volume_by_id(volume_id: str) -> typing.Optional[StorageVolume]:
"""Get volume by id"""
volume = BlockDevices().get_block_device(volume_id)
if volume is None:
return None
return StorageVolume(
total_space=str(volume.fssize)
if volume.fssize is not None
else str(volume.size),
free_space=str(volume.fsavail),
used_space=str(volume.fsused),
root=volume.name == "sda1",
name=volume.name,
model=volume.model,
serial=volume.serial,
type=volume.type,
)


@ -1,8 +1,8 @@
import typing
from enum import Enum
import strawberry
import selfprivacy_api.actions.users as users_actions
from selfprivacy_api.utils import ReadUserData
from selfprivacy_api.graphql.mutations.mutation_interface import (
MutationReturnInterface,
)
@ -28,51 +28,30 @@ class User:
class UserMutationReturn(MutationReturnInterface):
"""Return type for user mutation"""
user: typing.Optional[User]
def ensure_ssh_and_users_fields_exist(data):
if "ssh" not in data:
data["ssh"] = []
data["ssh"]["rootKeys"] = []
elif data["ssh"].get("rootKeys") is None:
data["ssh"]["rootKeys"] = []
if "sshKeys" not in data:
data["sshKeys"] = []
if "users" not in data:
data["users"] = []
user: typing.Optional[User] = None
def get_user_by_username(username: str) -> typing.Optional[User]:
with ReadUserData() as data:
ensure_ssh_and_users_fields_exist(data)
if username == "root":
return User(
user_type=UserType.ROOT,
username="root",
ssh_keys=data["ssh"]["rootKeys"],
)
if username == data["username"]:
return User(
user_type=UserType.PRIMARY,
username=username,
ssh_keys=data["sshKeys"],
)
for user in data["users"]:
if user["username"] == username:
if "sshKeys" not in user:
user["sshKeys"] = []
return User(
user_type=UserType.NORMAL,
username=username,
ssh_keys=user["sshKeys"],
)
user = users_actions.get_user_by_username(username)
if user is None:
return None
return User(
user_type=UserType(user.origin.value),
username=user.username,
ssh_keys=user.ssh_keys,
)
def get_users() -> typing.List[User]:
"""Get users"""
users = users_actions.get_users(exclude_root=True)
return [
User(
user_type=UserType(user.origin.value),
username=user.username,
ssh_keys=user.ssh_keys,
)
for user in users
]


@ -2,8 +2,16 @@
# pylint: disable=too-few-public-methods
import datetime
import typing
from flask import request
import strawberry
from strawberry.types import Info
from selfprivacy_api.actions.api_tokens import (
CannotDeleteCallerException,
InvalidExpirationDate,
InvalidUsesLeft,
NotFoundException,
delete_api_token,
get_new_api_recovery_key,
)
from selfprivacy_api.graphql import IsAuthenticated
from selfprivacy_api.graphql.mutations.mutation_interface import (
GenericMutationReturn,
@ -12,11 +20,7 @@ from selfprivacy_api.graphql.mutations.mutation_interface import (
from selfprivacy_api.utils.auth import (
delete_new_device_auth_token,
delete_token,
generate_recovery_token,
get_new_device_auth_token,
is_token_name_exists,
is_token_name_pair_valid,
refresh_token,
use_mnemonic_recoverery_token,
use_new_device_auth_token,
@ -64,27 +68,24 @@ class ApiMutations:
self, limits: typing.Optional[RecoveryKeyLimitsInput] = None
) -> ApiKeyMutationReturn:
"""Generate recovery key"""
if limits is not None:
if limits.expiration_date is not None:
if limits.expiration_date < datetime.datetime.now():
return ApiKeyMutationReturn(
success=False,
message="Expiration date must be in the future",
code=400,
key=None,
)
if limits.uses is not None:
if limits.uses < 1:
return ApiKeyMutationReturn(
success=False,
message="Uses must be greater than 0",
code=400,
key=None,
)
if limits is not None:
key = generate_recovery_token(limits.expiration_date, limits.uses)
else:
key = generate_recovery_token(None, None)
if limits is None:
limits = RecoveryKeyLimitsInput()
try:
key = get_new_api_recovery_key(limits.expiration_date, limits.uses)
except InvalidExpirationDate:
return ApiKeyMutationReturn(
success=False,
message="Expiration date must be in the future",
code=400,
key=None,
)
except InvalidUsesLeft:
return ApiKeyMutationReturn(
success=False,
message="Uses must be greater than 0",
code=400,
key=None,
)
return ApiKeyMutationReturn(
success=True,
message="Recovery key generated",
@ -113,12 +114,12 @@ class ApiMutations:
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def refresh_device_api_token(self) -> DeviceApiTokenMutationReturn:
def refresh_device_api_token(self, info: Info) -> DeviceApiTokenMutationReturn:
"""Refresh device api token"""
token = (
request.headers.get("Authorization").split(" ")[1]
if request.headers.get("Authorization") is not None
else None
info.context["request"]
.headers.get("Authorization", "")
.replace("Bearer ", "")
)
if token is None:
return DeviceApiTokenMutationReturn(
@ -143,26 +144,33 @@ class ApiMutations:
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def delete_device_api_token(self, device: str) -> GenericMutationReturn:
def delete_device_api_token(self, device: str, info: Info) -> GenericMutationReturn:
"""Delete device api token"""
self_token = (
request.headers.get("Authorization").split(" ")[1]
if request.headers.get("Authorization") is not None
else None
info.context["request"]
.headers.get("Authorization", "")
.replace("Bearer ", "")
)
if self_token is not None and is_token_name_pair_valid(device, self_token):
return GenericMutationReturn(
success=False,
message="Cannot delete caller's token",
code=400,
)
if not is_token_name_exists(device):
try:
delete_api_token(self_token, device)
except NotFoundException:
return GenericMutationReturn(
success=False,
message="Token not found",
code=404,
)
delete_token(device)
except CannotDeleteCallerException:
return GenericMutationReturn(
success=False,
message="Cannot delete caller token",
code=400,
)
except Exception as e:
return GenericMutationReturn(
success=False,
message=str(e),
code=500,
)
return GenericMutationReturn(
success=True,
message="Token deleted",


@ -0,0 +1,27 @@
"""Manipulate jobs"""
# pylint: disable=too-few-public-methods
import strawberry
from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn
from selfprivacy_api.jobs import Jobs
@strawberry.type
class JobMutations:
"""Mutations related to jobs"""
@strawberry.mutation
def remove_job(self, job_id: str) -> GenericMutationReturn:
"""Remove a job from the queue"""
result = Jobs().remove_by_uuid(job_id)
if result:
return GenericMutationReturn(
success=True,
code=200,
message="Job removed",
)
return GenericMutationReturn(
success=False,
code=404,
message="Job not found",
)


@ -1,4 +1,7 @@
import strawberry
import typing
from selfprivacy_api.graphql.common_types.jobs import ApiJob
@strawberry.interface
@ -11,3 +14,8 @@ class MutationReturnInterface:
@strawberry.type
class GenericMutationReturn(MutationReturnInterface):
pass
@strawberry.type
class GenericJobButationReturn(MutationReturnInterface):
job: typing.Optional[ApiJob] = None


@ -0,0 +1,169 @@
"""Services mutations"""
# pylint: disable=too-few-public-methods
import typing
import strawberry
from selfprivacy_api.graphql import IsAuthenticated
from selfprivacy_api.graphql.common_types.jobs import job_to_api_job
from selfprivacy_api.graphql.common_types.service import (
Service,
service_to_graphql_service,
)
from selfprivacy_api.graphql.mutations.mutation_interface import (
GenericJobButationReturn,
GenericMutationReturn,
)
from selfprivacy_api.services import get_service_by_id
from selfprivacy_api.utils.block_devices import BlockDevices
@strawberry.type
class ServiceMutationReturn(GenericMutationReturn):
"""Service mutation return type."""
service: typing.Optional[Service] = None
@strawberry.input
class MoveServiceInput:
"""Move service input type."""
service_id: str
location: str
@strawberry.type
class ServiceJobMutationReturn(GenericJobButationReturn):
"""Service job mutation return type."""
service: typing.Optional[Service] = None
@strawberry.type
class ServicesMutations:
"""Services mutations."""
@strawberry.mutation(permission_classes=[IsAuthenticated])
def enable_service(self, service_id: str) -> ServiceMutationReturn:
"""Enable service."""
service = get_service_by_id(service_id)
if service is None:
return ServiceMutationReturn(
success=False,
message="Service not found.",
code=404,
)
service.enable()
return ServiceMutationReturn(
success=True,
message="Service enabled.",
code=200,
service=service_to_graphql_service(service),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def disable_service(self, service_id: str) -> ServiceMutationReturn:
"""Disable service."""
service = get_service_by_id(service_id)
if service is None:
return ServiceMutationReturn(
success=False,
message="Service not found.",
code=404,
)
service.disable()
return ServiceMutationReturn(
success=True,
message="Service disabled.",
code=200,
service=service_to_graphql_service(service),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def stop_service(self, service_id: str) -> ServiceMutationReturn:
"""Stop service."""
service = get_service_by_id(service_id)
if service is None:
return ServiceMutationReturn(
success=False,
message="Service not found.",
code=404,
)
service.stop()
return ServiceMutationReturn(
success=True,
message="Service stopped.",
code=200,
service=service_to_graphql_service(service),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def start_service(self, service_id: str) -> ServiceMutationReturn:
"""Start service."""
service = get_service_by_id(service_id)
if service is None:
return ServiceMutationReturn(
success=False,
message="Service not found.",
code=404,
)
service.start()
return ServiceMutationReturn(
success=True,
message="Service started.",
code=200,
service=service_to_graphql_service(service),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def restart_service(self, service_id: str) -> ServiceMutationReturn:
"""Restart service."""
service = get_service_by_id(service_id)
if service is None:
return ServiceMutationReturn(
success=False,
message="Service not found.",
code=404,
)
service.restart()
return ServiceMutationReturn(
success=True,
message="Service restarted.",
code=200,
service=service_to_graphql_service(service),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def move_service(self, input: MoveServiceInput) -> ServiceJobMutationReturn:
"""Move service."""
service = get_service_by_id(input.service_id)
if service is None:
return ServiceJobMutationReturn(
success=False,
message="Service not found.",
code=404,
)
if not service.is_movable():
return ServiceJobMutationReturn(
success=False,
message="Service is not movable.",
code=400,
service=service_to_graphql_service(service),
)
volume = BlockDevices().get_block_device(input.location)
if volume is None:
return ServiceJobMutationReturn(
success=False,
message="Volume not found.",
code=404,
service=service_to_graphql_service(service),
)
job = service.move_to_volume(volume)
return ServiceJobMutationReturn(
success=True,
message="Service moved.",
code=200,
service=service_to_graphql_service(service),
job=job_to_api_job(job),
)


@ -3,9 +3,13 @@
# pylint: disable=too-few-public-methods
import strawberry
from selfprivacy_api.actions.users import UserNotFound
from selfprivacy_api.graphql import IsAuthenticated
from selfprivacy_api.graphql.mutations.ssh_utils import (
from selfprivacy_api.actions.ssh import (
InvalidPublicKey,
KeyAlreadyExists,
KeyNotFound,
create_ssh_key,
remove_ssh_key,
)
@ -31,12 +35,37 @@ class SshMutations:
def add_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn:
"""Add a new ssh key"""
success, message, code = create_ssh_key(ssh_input.username, ssh_input.ssh_key)
try:
create_ssh_key(ssh_input.username, ssh_input.ssh_key)
except KeyAlreadyExists:
return UserMutationReturn(
success=False,
message="Key already exists",
code=409,
)
except InvalidPublicKey:
return UserMutationReturn(
success=False,
message="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported",
code=400,
)
except UserNotFound:
return UserMutationReturn(
success=False,
message="User not found",
code=404,
)
except Exception as e:
return UserMutationReturn(
success=False,
message=str(e),
code=500,
)
return UserMutationReturn(
success=success,
message=message,
code=code,
success=True,
message="New SSH key successfully written",
code=201,
user=get_user_by_username(ssh_input.username),
)
@ -44,11 +73,30 @@ class SshMutations:
def remove_ssh_key(self, ssh_input: SshMutationInput) -> UserMutationReturn:
"""Remove ssh key from user"""
success, message, code = remove_ssh_key(ssh_input.username, ssh_input.ssh_key)
try:
remove_ssh_key(ssh_input.username, ssh_input.ssh_key)
except KeyNotFound:
return UserMutationReturn(
success=False,
message="Key not found",
code=404,
)
except UserNotFound:
return UserMutationReturn(
success=False,
message="User not found",
code=404,
)
except Exception as e:
return UserMutationReturn(
success=False,
message=str(e),
code=500,
)
return UserMutationReturn(
success=success,
message=message,
code=code,
success=True,
message="SSH key successfully removed",
code=200,
user=get_user_by_username(ssh_input.username),
)


@ -1,74 +0,0 @@
from selfprivacy_api.graphql.common_types.user import ensure_ssh_and_users_fields_exist
from selfprivacy_api.utils import (
WriteUserData,
validate_ssh_public_key,
)
def create_ssh_key(username: str, ssh_key: str) -> tuple[bool, str, int]:
"""Create a new ssh key"""
if not validate_ssh_public_key(ssh_key):
return (
False,
"Invalid key type. Only ssh-ed25519 and ssh-rsa are supported",
400,
)
with WriteUserData() as data:
ensure_ssh_and_users_fields_exist(data)
if username == data["username"]:
if ssh_key in data["sshKeys"]:
return False, "Key already exists", 409
data["sshKeys"].append(ssh_key)
return True, "New SSH key successfully written", 201
if username == "root":
if ssh_key in data["ssh"]["rootKeys"]:
return False, "Key already exists", 409
data["ssh"]["rootKeys"].append(ssh_key)
return True, "New SSH key successfully written", 201
for user in data["users"]:
if user["username"] == username:
if ssh_key in user["sshKeys"]:
return False, "Key already exists", 409
user["sshKeys"].append(ssh_key)
return True, "New SSH key successfully written", 201
return False, "User not found", 404
def remove_ssh_key(username: str, ssh_key: str) -> tuple[bool, str, int]:
"""Delete a ssh key"""
with WriteUserData() as data:
ensure_ssh_and_users_fields_exist(data)
if username == "root":
if ssh_key in data["ssh"]["rootKeys"]:
data["ssh"]["rootKeys"].remove(ssh_key)
return True, "SSH key deleted", 200
return False, "Key not found", 404
if username == data["username"]:
if ssh_key in data["sshKeys"]:
data["sshKeys"].remove(ssh_key)
return True, "SSH key deleted", 200
return False, "Key not found", 404
for user in data["users"]:
if user["username"] == username:
if ssh_key in user["sshKeys"]:
user["sshKeys"].remove(ssh_key)
return True, "SSH key deleted", 200
return False, "Key not found", 404
return False, "User not found", 404


@ -1,11 +1,28 @@
"""Storage devices mutations"""
import typing
import strawberry
from selfprivacy_api.graphql import IsAuthenticated
from selfprivacy_api.graphql.common_types.jobs import job_to_api_job
from selfprivacy_api.utils.block_devices import BlockDevices
from selfprivacy_api.graphql.mutations.mutation_interface import (
GenericJobButationReturn,
GenericMutationReturn,
)
from selfprivacy_api.jobs.migrate_to_binds import (
BindMigrationConfig,
is_bind_migrated,
start_bind_migration,
)
@strawberry.input
class MigrateToBindsInput:
"""Migrate to binds input"""
email_block_device: str
bitwarden_block_device: str
gitea_block_device: str
nextcloud_block_device: str
pleroma_block_device: str
@strawberry.type
@ -60,3 +77,26 @@ class StorageMutations:
return GenericMutationReturn(
success=False, code=409, message="Volume not unmounted (already unmounted?)"
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def migrate_to_binds(self, input: MigrateToBindsInput) -> GenericJobButationReturn:
"""Migrate to binds"""
if is_bind_migrated():
return GenericJobButationReturn(
success=False, code=409, message="Already migrated to binds"
)
job = start_bind_migration(
BindMigrationConfig(
email_block_device=input.email_block_device,
bitwarden_block_device=input.bitwarden_block_device,
gitea_block_device=input.gitea_block_device,
nextcloud_block_device=input.nextcloud_block_device,
pleroma_block_device=input.pleroma_block_device,
)
)
return GenericJobButationReturn(
success=True,
code=200,
message="Migration to binds started, rebuild the system to apply changes",
job=job_to_api_job(job),
)


@ -1,15 +1,14 @@
"""System management mutations"""
# pylint: disable=too-few-public-methods
import subprocess
import typing
import pytz
import strawberry
from selfprivacy_api.graphql import IsAuthenticated
from selfprivacy_api.graphql.mutations.mutation_interface import (
GenericMutationReturn,
MutationReturnInterface,
)
from selfprivacy_api.utils import WriteUserData
import selfprivacy_api.actions.system as system_actions
@strawberry.type
@ -42,15 +41,15 @@ class SystemMutations:
@strawberry.mutation(permission_classes=[IsAuthenticated])
def change_timezone(self, timezone: str) -> TimezoneMutationReturn:
"""Change the timezone of the server. Timezone is a tzdatabase name."""
if timezone not in pytz.all_timezones:
try:
system_actions.change_timezone(timezone)
except system_actions.InvalidTimezone as e:
return TimezoneMutationReturn(
success=False,
message="Invalid timezone",
message=str(e),
code=400,
timezone=None,
)
with WriteUserData() as data:
data["timezone"] = timezone
return TimezoneMutationReturn(
success=True,
message="Timezone changed",
@ -63,36 +62,23 @@ class SystemMutations:
self, settings: AutoUpgradeSettingsInput
) -> AutoUpgradeSettingsMutationReturn:
"""Change auto upgrade settings of the server."""
with WriteUserData() as data:
if "autoUpgrade" not in data:
data["autoUpgrade"] = {}
if "enable" not in data["autoUpgrade"]:
data["autoUpgrade"]["enable"] = True
if "allowReboot" not in data["autoUpgrade"]:
data["autoUpgrade"]["allowReboot"] = False
system_actions.set_auto_upgrade_settings(
settings.enableAutoUpgrade, settings.allowReboot
)
if settings.enableAutoUpgrade is not None:
data["autoUpgrade"]["enable"] = settings.enableAutoUpgrade
if settings.allowReboot is not None:
data["autoUpgrade"]["allowReboot"] = settings.allowReboot
auto_upgrade = data["autoUpgrade"]["enable"]
allow_reboot = data["autoUpgrade"]["allowReboot"]
new_settings = system_actions.get_auto_upgrade_settings()
return AutoUpgradeSettingsMutationReturn(
success=True,
message="Auto-upgrade settings changed",
code=200,
enableAutoUpgrade=auto_upgrade,
allowReboot=allow_reboot,
enableAutoUpgrade=new_settings.enable,
allowReboot=new_settings.allowReboot,
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def run_system_rebuild(self) -> GenericMutationReturn:
rebuild_result = subprocess.Popen(
["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True
)
rebuild_result.communicate()[0]
system_actions.rebuild_system()
return GenericMutationReturn(
success=True,
message="Starting rebuild system",
@ -101,10 +87,7 @@ class SystemMutations:
@strawberry.mutation(permission_classes=[IsAuthenticated])
def run_system_rollback(self) -> GenericMutationReturn:
rollback_result = subprocess.Popen(
["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True
)
rollback_result.communicate()[0]
system_actions.rollback_system()
return GenericMutationReturn(
success=True,
message="Starting rebuild system",
@ -113,10 +96,7 @@ class SystemMutations:
@strawberry.mutation(permission_classes=[IsAuthenticated])
def run_system_upgrade(self) -> GenericMutationReturn:
upgrade_result = subprocess.Popen(
["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True
)
upgrade_result.communicate()[0]
system_actions.upgrade_system()
return GenericMutationReturn(
success=True,
message="Starting rebuild system",
@ -125,9 +105,24 @@ class SystemMutations:
@strawberry.mutation(permission_classes=[IsAuthenticated])
def reboot_system(self) -> GenericMutationReturn:
subprocess.Popen(["reboot"], start_new_session=True)
system_actions.reboot_system()
return GenericMutationReturn(
success=True,
message="System reboot has started",
code=200,
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def pull_repository_changes(self) -> GenericMutationReturn:
result = system_actions.pull_repository_changes()
if result.status == 0:
return GenericMutationReturn(
success=True,
message="Repository changes pulled",
code=200,
)
return GenericMutationReturn(
success=False,
message=f"Failed to pull repository changes:\n{result.data}",
code=500,
)


@ -10,11 +10,7 @@ from selfprivacy_api.graphql.common_types.user import (
from selfprivacy_api.graphql.mutations.mutation_interface import (
GenericMutationReturn,
)
from selfprivacy_api.graphql.mutations.users_utils import (
create_user,
delete_user,
update_user,
)
import selfprivacy_api.actions.users as users_actions
@strawberry.input
@ -31,35 +27,91 @@ class UserMutations:
@strawberry.mutation(permission_classes=[IsAuthenticated])
def create_user(self, user: UserMutationInput) -> UserMutationReturn:
success, message, code = create_user(user.username, user.password)
try:
users_actions.create_user(user.username, user.password)
except users_actions.PasswordIsEmpty as e:
return UserMutationReturn(
success=False,
message=str(e),
code=400,
)
except users_actions.UsernameForbidden as e:
return UserMutationReturn(
success=False,
message=str(e),
code=409,
)
except users_actions.UsernameNotAlphanumeric as e:
return UserMutationReturn(
success=False,
message=str(e),
code=400,
)
except users_actions.UsernameTooLong as e:
return UserMutationReturn(
success=False,
message=str(e),
code=400,
)
except users_actions.UserAlreadyExists as e:
return UserMutationReturn(
success=False,
message=str(e),
code=409,
user=get_user_by_username(user.username),
)
return UserMutationReturn(
success=success,
message=message,
code=code,
success=True,
message="User created",
code=201,
user=get_user_by_username(user.username),
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def delete_user(self, username: str) -> GenericMutationReturn:
success, message, code = delete_user(username)
try:
users_actions.delete_user(username)
except users_actions.UserNotFound as e:
return GenericMutationReturn(
success=False,
message=str(e),
code=404,
)
except users_actions.UserIsProtected as e:
return GenericMutationReturn(
success=False,
message=str(e),
code=400,
)
return GenericMutationReturn(
success=success,
message=message,
code=code,
success=True,
message="User deleted",
code=200,
)
@strawberry.mutation(permission_classes=[IsAuthenticated])
def update_user(self, user: UserMutationInput) -> UserMutationReturn:
"""Update user mutation"""
success, message, code = update_user(user.username, user.password)
try:
users_actions.update_user(user.username, user.password)
except users_actions.PasswordIsEmpty as e:
return UserMutationReturn(
success=False,
message=str(e),
code=400,
)
except users_actions.UserNotFound as e:
return UserMutationReturn(
success=False,
message=str(e),
code=404,
)
return UserMutationReturn(
success=success,
message=message,
code=code,
success=True,
message="User updated",
code=200,
user=get_user_by_username(user.username),
)


@ -1,111 +0,0 @@
import re
from selfprivacy_api.utils import (
WriteUserData,
ReadUserData,
is_username_forbidden,
)
from selfprivacy_api.utils import hash_password
def ensure_ssh_and_users_fields_exist(data):
if "ssh" not in data:
data["ssh"] = []
data["ssh"]["rootKeys"] = []
elif data["ssh"].get("rootKeys") is None:
data["ssh"]["rootKeys"] = []
if "sshKeys" not in data:
data["sshKeys"] = []
if "users" not in data:
data["users"] = []
def create_user(username: str, password: str) -> tuple[bool, str, int]:
"""Create a new user"""
# Check if password is null or none
if password == "":
return False, "Password is null", 400
# Check if username is forbidden
if is_username_forbidden(username):
return False, "Username is forbidden", 409
# Check is username passes regex
if not re.match(r"^[a-z_][a-z0-9_]+$", username):
return False, "Username must be alphanumeric", 400
# Check if username less than 32 characters
if len(username) >= 32:
return False, "Username must be less than 32 characters", 400
with ReadUserData() as data:
ensure_ssh_and_users_fields_exist(data)
# Return 409 if user already exists
if data["username"] == username:
return False, "User already exists", 409
for data_user in data["users"]:
if data_user["username"] == username:
return False, "User already exists", 409
hashed_password = hash_password(password)
with WriteUserData() as data:
ensure_ssh_and_users_fields_exist(data)
data["users"].append(
{
"username": username,
"hashedPassword": hashed_password,
"sshKeys": [],
}
)
return True, "User was successfully created!", 201
def delete_user(username: str) -> tuple[bool, str, int]:
with WriteUserData() as data:
ensure_ssh_and_users_fields_exist(data)
if username == data["username"] or username == "root":
return False, "Cannot delete main or root user", 400
# Return 404 if user does not exist
for data_user in data["users"]:
if data_user["username"] == username:
data["users"].remove(data_user)
break
else:
return False, "User does not exist", 404
return True, "User was deleted", 200
def update_user(username: str, password: str) -> tuple[bool, str, int]:
# Check if password is null or none
if password == "":
return False, "Password is null", 400
hashed_password = hash_password(password)
with WriteUserData() as data:
ensure_ssh_and_users_fields_exist(data)
if username == data["username"]:
data["hashedMasterPassword"] = hashed_password
# Return 404 if user does not exist
else:
for data_user in data["users"]:
if data_user["username"] == username:
data_user["hashedPassword"] = hashed_password
break
else:
return False, "User does not exist", 404
return True, "User was successfully updated", 200


@ -2,26 +2,23 @@
# pylint: disable=too-few-public-methods
import datetime
import typing
from flask import request
import strawberry
from strawberry.types import Info
from selfprivacy_api.actions.api_tokens import get_api_tokens_with_caller_flag
from selfprivacy_api.graphql import IsAuthenticated
from selfprivacy_api.utils import parse_date
from selfprivacy_api.dependencies import get_api_version as get_api_version_dependency
from selfprivacy_api.utils.auth import (
get_recovery_token_status,
get_tokens_info,
is_recovery_token_exists,
is_recovery_token_valid,
is_token_name_exists,
is_token_name_pair_valid,
refresh_token,
get_token_name,
)
def get_api_version() -> str:
"""Get API version"""
return "1.2.7"
return get_api_version_dependency()
@strawberry.type
@ -33,24 +30,6 @@ class ApiDevice:
is_caller: bool
def get_devices() -> typing.List[ApiDevice]:
"""Get list of devices"""
caller_name = get_token_name(
request.headers.get("Authorization").split(" ")[1]
if request.headers.get("Authorization") is not None
else None
)
tokens = get_tokens_info()
return [
ApiDevice(
name=token["name"],
creation_date=parse_date(token["date"]),
is_caller=token["name"] == caller_name,
)
for token in tokens
]
@strawberry.type
class ApiRecoveryKeyStatus:
"""Recovery key status"""
@ -97,9 +76,22 @@ class Api:
"""API access status"""
version: str = strawberry.field(resolver=get_api_version)
devices: typing.List[ApiDevice] = strawberry.field(
resolver=get_devices, permission_classes=[IsAuthenticated]
)
@strawberry.field(permission_classes=[IsAuthenticated])
def devices(self, info: Info) -> typing.List[ApiDevice]:
return [
ApiDevice(
name=device.name,
creation_date=device.date,
is_caller=device.is_caller,
)
for device in get_api_tokens_with_caller_flag(
info.context["request"]
.headers.get("Authorization", "")
.replace("Bearer ", "")
)
]
recovery_key: ApiRecoveryKeyStatus = strawberry.field(
resolver=get_recovery_key_status, permission_classes=[IsAuthenticated]
)


@ -0,0 +1,25 @@
"""Jobs status"""
# pylint: disable=too-few-public-methods
import typing
import strawberry
from selfprivacy_api.graphql.common_types.jobs import (
ApiJob,
get_api_job_by_id,
job_to_api_job,
)
from selfprivacy_api.jobs import Jobs
@strawberry.type
class Job:
@strawberry.field
def get_jobs(self) -> typing.List[ApiJob]:
Jobs.get_instance().get_jobs()
return [job_to_api_job(job) for job in Jobs.get_instance().get_jobs()]
@strawberry.field
def get_job(self, job_id: str) -> typing.Optional[ApiJob]:
return get_api_job_by_id(job_id)


@ -1,7 +1,5 @@
"""Enums representing different service providers."""
from enum import Enum
import datetime
import typing
import strawberry


@ -0,0 +1,18 @@
"""Services status"""
# pylint: disable=too-few-public-methods
import typing
import strawberry
from selfprivacy_api.graphql.common_types.service import (
Service,
service_to_graphql_service,
)
from selfprivacy_api.services import get_all_services
@strawberry.type
class Services:
@strawberry.field
def all_services(self) -> typing.List[Service]:
services = get_all_services()
return [service_to_graphql_service(service) for service in services]


@ -2,23 +2,13 @@
# pylint: disable=too-few-public-methods
import typing
import strawberry
from selfprivacy_api.graphql.common_types.service import (
StorageVolume,
)
from selfprivacy_api.utils.block_devices import BlockDevices
@strawberry.type
class StorageVolume:
"""Stats and basic info about a volume or a system disk."""
total_space: str
free_space: str
used_space: str
root: bool
name: str
model: typing.Optional[str]
serial: typing.Optional[str]
type: str
@strawberry.type
class Storage:
"""GraphQL queries to get storage information."""


@ -1,23 +1,18 @@
"""Common system information and settings"""
# pylint: disable=too-few-public-methods
import subprocess
import os
import typing
import strawberry
from selfprivacy_api.graphql.common_types.dns import DnsRecord
from selfprivacy_api.graphql.queries.common import Alert, Severity
from selfprivacy_api.graphql.queries.providers import DnsProvider, ServerProvider
from selfprivacy_api.jobs import Jobs
from selfprivacy_api.jobs.migrate_to_binds import is_bind_migrated
from selfprivacy_api.services import get_all_required_dns_records
from selfprivacy_api.utils import ReadUserData
@strawberry.type
class DnsRecord:
"""DNS record"""
recordType: str
name: str
content: str
ttl: int
priority: typing.Optional[int]
import selfprivacy_api.actions.system as system_actions
import selfprivacy_api.actions.ssh as ssh_actions
@strawberry.type
@@ -27,7 +22,20 @@ class SystemDomainInfo:
domain: str
hostname: str
provider: DnsProvider
required_dns_records: typing.List[DnsRecord]
@strawberry.field
def required_dns_records(self) -> typing.List[DnsRecord]:
"""Collect all required DNS records for all services"""
return [
DnsRecord(
record_type=record.type,
name=record.name,
content=record.content,
ttl=record.ttl,
priority=record.priority,
)
for record in get_all_required_dns_records()
]
def get_system_domain_info() -> SystemDomainInfo:
@@ -37,8 +45,6 @@ def get_system_domain_info() -> SystemDomainInfo:
domain=user_data["domain"],
hostname=user_data["hostname"],
provider=DnsProvider.CLOUDFLARE,
# TODO: get ip somehow
required_dns_records=[],
)
@@ -52,17 +58,11 @@ class AutoUpgradeOptions:
def get_auto_upgrade_options() -> AutoUpgradeOptions:
"""Get automatic upgrade options"""
with ReadUserData() as user_data:
if "autoUpgrade" not in user_data:
return AutoUpgradeOptions(enable=True, allow_reboot=False)
if "enable" not in user_data["autoUpgrade"]:
user_data["autoUpgrade"]["enable"] = True
if "allowReboot" not in user_data["autoUpgrade"]:
user_data["autoUpgrade"]["allowReboot"] = False
return AutoUpgradeOptions(
enable=user_data["autoUpgrade"]["enable"],
allow_reboot=user_data["autoUpgrade"]["allowReboot"],
)
settings = system_actions.get_auto_upgrade_settings()
return AutoUpgradeOptions(
enable=settings.enable,
allow_reboot=settings.allowReboot,
)
@strawberry.type
@@ -76,30 +76,17 @@ class SshSettings:
def get_ssh_settings() -> SshSettings:
"""Get SSH settings"""
with ReadUserData() as user_data:
if "ssh" not in user_data:
return SshSettings(
enable=False, password_authentication=False, root_ssh_keys=[]
)
if "enable" not in user_data["ssh"]:
user_data["ssh"]["enable"] = False
if "passwordAuthentication" not in user_data["ssh"]:
user_data["ssh"]["passwordAuthentication"] = False
if "rootKeys" not in user_data["ssh"]:
user_data["ssh"]["rootKeys"] = []
return SshSettings(
enable=user_data["ssh"]["enable"],
password_authentication=user_data["ssh"]["passwordAuthentication"],
root_ssh_keys=user_data["ssh"]["rootKeys"],
)
settings = ssh_actions.get_ssh_settings()
return SshSettings(
enable=settings.enable,
password_authentication=settings.passwordAuthentication,
root_ssh_keys=settings.rootKeys,
)
def get_system_timezone() -> str:
"""Get system timezone"""
with ReadUserData() as user_data:
if "timezone" not in user_data:
return "Europe/Uzhgorod"
return user_data["timezone"]
return system_actions.get_timezone()
@strawberry.type
@@ -115,12 +102,12 @@ class SystemSettings:
def get_system_version() -> str:
"""Get system version"""
return subprocess.check_output(["uname", "-a"]).decode("utf-8").strip()
return system_actions.get_system_version()
def get_python_version() -> str:
"""Get Python version"""
return subprocess.check_output(["python", "-V"]).decode("utf-8").strip()
return system_actions.get_python_version()
@strawberry.type
@@ -130,6 +117,11 @@ class SystemInfo:
system_version: str = strawberry.field(resolver=get_system_version)
python_version: str = strawberry.field(resolver=get_python_version)
@strawberry.field
def using_binds(self) -> bool:
"""Check if the system is using BINDs"""
return is_bind_migrated()
@strawberry.type
class SystemProviderInfo:
@@ -162,4 +154,13 @@ class System:
settings: SystemSettings = SystemSettings()
info: SystemInfo = SystemInfo()
provider: SystemProviderInfo = strawberry.field(resolver=get_system_provider_info)
busy: bool = False
@strawberry.field
def busy(self) -> bool:
"""Check if the system is busy"""
return Jobs.is_busy()
@strawberry.field
def working_directory(self) -> str:
"""Get working directory"""
return os.getcwd()

View file

@@ -5,27 +5,12 @@ import strawberry
from selfprivacy_api.graphql.common_types.user import (
User,
ensure_ssh_and_users_fields_exist,
get_user_by_username,
get_users,
)
from selfprivacy_api.utils import ReadUserData
from selfprivacy_api.graphql import IsAuthenticated
def get_users() -> typing.List[User]:
"""Get users"""
user_list = []
with ReadUserData() as data:
ensure_ssh_and_users_fields_exist(data)
for user in data["users"]:
user_list.append(get_user_by_username(user["username"]))
user_list.append(get_user_by_username(data["username"]))
return user_list
@strawberry.type
class Users:
@strawberry.field(permission_classes=[IsAuthenticated])

View file

@@ -1,19 +1,27 @@
"""GraphQL API for SelfPrivacy."""
# pylint: disable=too-few-public-methods
import asyncio
from typing import AsyncGenerator
import strawberry
from selfprivacy_api.graphql import IsAuthenticated
from selfprivacy_api.graphql.mutations.api_mutations import ApiMutations
from selfprivacy_api.graphql.mutations.job_mutations import JobMutations
from selfprivacy_api.graphql.mutations.mutation_interface import GenericMutationReturn
from selfprivacy_api.graphql.mutations.services_mutations import ServicesMutations
from selfprivacy_api.graphql.mutations.ssh_mutations import SshMutations
from selfprivacy_api.graphql.mutations.storage_mutation import StorageMutations
from selfprivacy_api.graphql.mutations.storage_mutations import StorageMutations
from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations
from selfprivacy_api.graphql.queries.api_queries import Api
from selfprivacy_api.graphql.queries.jobs import Job
from selfprivacy_api.graphql.queries.services import Services
from selfprivacy_api.graphql.queries.storage import Storage
from selfprivacy_api.graphql.queries.system import System
from selfprivacy_api.graphql.mutations.users_mutations import UserMutations
from selfprivacy_api.graphql.queries.users import Users
from selfprivacy_api.jobs.test import test_job
@strawberry.type
@@ -40,6 +48,16 @@ class Query:
"""Storage queries"""
return Storage()
@strawberry.field(permission_classes=[IsAuthenticated])
def jobs(self) -> Job:
"""Jobs queries"""
return Job()
@strawberry.field(permission_classes=[IsAuthenticated])
def services(self) -> Services:
"""Services queries"""
return Services()
@strawberry.type
class Mutation(
@@ -48,10 +66,33 @@ class Mutation(
UserMutations,
SshMutations,
StorageMutations,
ServicesMutations,
JobMutations,
):
"""Root schema for mutations"""
@strawberry.mutation(permission_classes=[IsAuthenticated])
def test_mutation(self) -> GenericMutationReturn:
"""Test mutation"""
test_job()
return GenericMutationReturn(
success=True,
message="Test mutation",
code=200,
)
pass
schema = strawberry.Schema(query=Query, mutation=Mutation)
@strawberry.type
class Subscription:
"""Root schema for subscriptions"""
@strawberry.subscription(permission_classes=[IsAuthenticated])
async def count(self, target: int = 100) -> AsyncGenerator[int, None]:
for i in range(target):
yield i
await asyncio.sleep(0.5)
schema = strawberry.Schema(query=Query, mutation=Mutation, subscription=Subscription)
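As a rough sketch, assuming strawberry's stock FastAPI integration (the project's actual wiring is not part of this hunk and may differ), a schema like this is typically mounted and served as follows:

from fastapi import FastAPI
from strawberry.fastapi import GraphQLRouter

from selfprivacy_api.graphql.schema import schema  # module path assumed

app = FastAPI()
graphql_app = GraphQLRouter(schema)
# Queries and mutations are served over HTTP, and the count subscription is
# served over a websocket connection, at the mounted path.
app.include_router(graphql_app, prefix="/graphql")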

View file

@@ -16,12 +16,18 @@ A job is a dictionary with the following keys:
"""
import typing
import datetime
from uuid import UUID
import asyncio
import json
import os
import time
import uuid
from enum import Enum
from pydantic import BaseModel
from selfprivacy_api.utils import ReadUserData, UserDataFiles, WriteUserData
class JobStatus(Enum):
"""
@@ -34,65 +40,23 @@ class JobStatus(Enum):
ERROR = "ERROR"
class Job:
class Job(BaseModel):
"""
Job class.
"""
def __init__(
self,
name: str,
description: str,
status: JobStatus,
created_at: datetime.datetime,
updated_at: datetime.datetime,
finished_at: typing.Optional[datetime.datetime],
error: typing.Optional[str],
result: typing.Optional[str],
):
self.id = str(uuid.uuid4())
self.name = name
self.description = description
self.status = status
self.created_at = created_at
self.updated_at = updated_at
self.finished_at = finished_at
self.error = error
self.result = result
def to_dict(self) -> dict:
"""
Convert the job to a dictionary.
"""
return {
"id": self.id,
"name": self.name,
"description": self.description,
"status": self.status,
"created_at": self.created_at,
"updated_at": self.updated_at,
"finished_at": self.finished_at,
"error": self.error,
"result": self.result,
}
def to_json(self) -> str:
"""
Convert the job to a JSON string.
"""
return json.dumps(self.to_dict())
def __str__(self) -> str:
"""
Convert the job to a string.
"""
return self.to_json()
def __repr__(self) -> str:
"""
Convert the job to a string.
"""
return self.to_json()
uid: UUID = uuid.uuid4()
type_id: str
name: str
description: str
status: JobStatus
status_text: typing.Optional[str]
progress: typing.Optional[int]
created_at: datetime.datetime
updated_at: datetime.datetime
finished_at: typing.Optional[datetime.datetime]
error: typing.Optional[str]
result: typing.Optional[str]
class Jobs:
@@ -109,6 +73,9 @@ class Jobs:
"""
if Jobs.__instance is None:
Jobs()
if Jobs.__instance is None:
raise Exception("Couldn't init Jobs singleton!")
return Jobs.__instance
return Jobs.__instance
def __init__(self):
@@ -119,41 +86,78 @@ class Jobs:
raise Exception("This class is a singleton!")
else:
Jobs.__instance = self
self.jobs = []
@staticmethod
def reset() -> None:
"""
Reset the jobs list.
"""
with WriteUserData(UserDataFiles.JOBS) as user_data:
user_data["jobs"] = []
@staticmethod
def add(
self, name: str, description: str, status: JobStatus = JobStatus.CREATED
name: str,
type_id: str,
description: str,
status: JobStatus = JobStatus.CREATED,
status_text: str = "",
progress: int = 0,
) -> Job:
"""
Add a job to the jobs list.
"""
job = Job(
name=name,
type_id=type_id,
description=description,
status=status,
status_text=status_text,
progress=progress,
created_at=datetime.datetime.now(),
updated_at=datetime.datetime.now(),
finished_at=None,
error=None,
result=None,
)
self.jobs.append(job)
with WriteUserData(UserDataFiles.JOBS) as user_data:
try:
if "jobs" not in user_data:
user_data["jobs"] = []
user_data["jobs"].append(json.loads(job.json()))
except json.decoder.JSONDecodeError:
user_data["jobs"] = [json.loads(job.json())]
return job
def remove(self, job: Job) -> None:
"""
Remove a job from the jobs list.
"""
self.jobs.remove(job)
self.remove_by_uuid(str(job.uid))
def remove_by_uuid(self, job_uuid: str) -> bool:
"""
Remove a job from the jobs list.
"""
with WriteUserData(UserDataFiles.JOBS) as user_data:
if "jobs" not in user_data:
user_data["jobs"] = []
for i, j in enumerate(user_data["jobs"]):
if j["uid"] == job_uuid:
del user_data["jobs"][i]
return True
return False
@staticmethod
def update(
self,
job: Job,
name: typing.Optional[str],
description: typing.Optional[str],
status: JobStatus,
error: typing.Optional[str],
result: typing.Optional[str],
status_text: typing.Optional[str] = None,
progress: typing.Optional[int] = None,
name: typing.Optional[str] = None,
description: typing.Optional[str] = None,
error: typing.Optional[str] = None,
result: typing.Optional[str] = None,
) -> Job:
"""
Update a job in the jobs list.
@@ -162,23 +166,62 @@ class Jobs:
job.name = name
if description is not None:
job.description = description
if status_text is not None:
job.status_text = status_text
if progress is not None:
job.progress = progress
job.status = status
job.updated_at = datetime.datetime.now()
job.error = error
job.result = result
if status in (JobStatus.FINISHED, JobStatus.ERROR):
job.finished_at = datetime.datetime.now()
with WriteUserData(UserDataFiles.JOBS) as user_data:
if "jobs" not in user_data:
user_data["jobs"] = []
for i, j in enumerate(user_data["jobs"]):
if j["uid"] == str(job.uid):
user_data["jobs"][i] = json.loads(job.json())
break
return job
def get_job(self, id: str) -> typing.Optional[Job]:
@staticmethod
def get_job(uid: str) -> typing.Optional[Job]:
"""
Get a job from the jobs list.
"""
for job in self.jobs:
if job.id == id:
return job
with ReadUserData(UserDataFiles.JOBS) as user_data:
if "jobs" not in user_data:
user_data["jobs"] = []
for job in user_data["jobs"]:
if job["uid"] == uid:
return Job(**job)
return None
def get_jobs(self) -> list:
@staticmethod
def get_jobs() -> typing.List[Job]:
"""
Get the jobs list.
"""
return self.jobs
with ReadUserData(UserDataFiles.JOBS) as user_data:
try:
if "jobs" not in user_data:
user_data["jobs"] = []
return [Job(**job) for job in user_data["jobs"]]
except json.decoder.JSONDecodeError:
return []
@staticmethod
def is_busy() -> bool:
"""
Check if there is a job running.
"""
with ReadUserData(UserDataFiles.JOBS) as user_data:
if "jobs" not in user_data:
user_data["jobs"] = []
for job in user_data["jobs"]:
if job["status"] == JobStatus.RUNNING.value:
return True
return False
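A minimal usage sketch of the reworked Jobs API above; the type_id and strings are made up for illustration. Jobs are now plain pydantic models persisted to the jobs user data file instead of being held in memory.

from selfprivacy_api.jobs import Jobs, JobStatus

# Create a job; it is serialized with job.json() and appended to the jobs file.
job = Jobs.add(
    type_id="example.long_task",  # hypothetical type_id
    name="Example task",
    description="Illustration of the Jobs lifecycle.",
)

# Progress updates rewrite the persisted entry that matches the job's uid.
Jobs.update(job=job, status=JobStatus.RUNNING, progress=50, status_text="Halfway")
Jobs.update(job=job, status=JobStatus.FINISHED, progress=100, result="Done")

# Lookups and the busy flag read the same file back.
finished = Jobs.get_job(str(job.uid))
assert finished is not None and finished.status == JobStatus.FINISHED
assert Jobs.is_busy() is False  # assuming no other job is RUNNING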

View file

@@ -0,0 +1,291 @@
"""Function to perform migration of app data to binds."""
import subprocess
import pathlib
import shutil
from pydantic import BaseModel
from selfprivacy_api.jobs import Job, JobStatus, Jobs
from selfprivacy_api.services.bitwarden import Bitwarden
from selfprivacy_api.services.gitea import Gitea
from selfprivacy_api.services.mailserver import MailServer
from selfprivacy_api.services.nextcloud import Nextcloud
from selfprivacy_api.services.pleroma import Pleroma
from selfprivacy_api.utils import ReadUserData, WriteUserData
from selfprivacy_api.utils.huey import huey
from selfprivacy_api.utils.block_devices import BlockDevices
class BindMigrationConfig(BaseModel):
"""Config for bind migration.
For each service, provide the block device name.
"""
email_block_device: str
bitwarden_block_device: str
gitea_block_device: str
nextcloud_block_device: str
pleroma_block_device: str
def is_bind_migrated() -> bool:
"""Check if bind migration was performed."""
with ReadUserData() as user_data:
return user_data.get("useBinds", False)
def activate_binds(config: BindMigrationConfig):
"""Activate binds."""
# Activate binds in userdata
with WriteUserData() as user_data:
if "email" not in user_data:
user_data["email"] = {}
user_data["email"]["location"] = config.email_block_device
if "bitwarden" not in user_data:
user_data["bitwarden"] = {}
user_data["bitwarden"]["location"] = config.bitwarden_block_device
if "gitea" not in user_data:
user_data["gitea"] = {}
user_data["gitea"]["location"] = config.gitea_block_device
if "nextcloud" not in user_data:
user_data["nextcloud"] = {}
user_data["nextcloud"]["location"] = config.nextcloud_block_device
if "pleroma" not in user_data:
user_data["pleroma"] = {}
user_data["pleroma"]["location"] = config.pleroma_block_device
user_data["useBinds"] = True
def move_folder(
data_path: pathlib.Path, bind_path: pathlib.Path, user: str, group: str
):
"""Move folder from data to bind."""
if data_path.exists():
shutil.move(str(data_path), str(bind_path))
else:
return
data_path.mkdir(mode=0o750, parents=True, exist_ok=True)
shutil.chown(str(bind_path), user=user, group=group)
shutil.chown(str(data_path), user=user, group=group)
subprocess.run(["mount", "--bind", str(bind_path), str(data_path)], check=True)
subprocess.run(["chown", "-R", f"{user}:{group}", str(data_path)], check=True)
@huey.task()
def migrate_to_binds(config: BindMigrationConfig, job: Job):
"""Migrate app data to binds."""
# Exit if migration is already done
if is_bind_migrated():
Jobs.update(
job=job,
status=JobStatus.ERROR,
error="Migration already done.",
)
return
Jobs.update(
job=job,
status=JobStatus.RUNNING,
progress=0,
status_text="Checking if all volumes are available.",
)
# Get block devices.
block_devices = BlockDevices().get_block_devices()
block_device_names = [device.name for device in block_devices]
# Get all unique required block devices
required_block_devices = []
for block_device_name in config.__dict__.values():
if block_device_name not in required_block_devices:
required_block_devices.append(block_device_name)
# Check if all block devices from config are present.
for block_device_name in required_block_devices:
if block_device_name not in block_device_names:
Jobs.update(
job=job,
status=JobStatus.ERROR,
error=f"Block device {block_device_name} not found.",
)
return
# Make sure all required block devices are mounted.
# sda1 is the root partition and is always mounted.
for block_device_name in required_block_devices:
if block_device_name == "sda1":
continue
block_device = BlockDevices().get_block_device(block_device_name)
if block_device is None:
Jobs.update(
job=job,
status=JobStatus.ERROR,
error=f"Block device {block_device_name} not found.",
)
return
if f"/volumes/{block_device_name}" not in block_device.mountpoints:
Jobs.update(
job=job,
status=JobStatus.ERROR,
error=f"Block device {block_device_name} not mounted.",
)
return
# Make sure /volumes/sda1 exists.
pathlib.Path("/volumes/sda1").mkdir(parents=True, exist_ok=True)
Jobs.update(
job=job,
status=JobStatus.RUNNING,
progress=5,
status_text="Activating binds in NixOS config.",
)
activate_binds(config)
# Perform migration of Nextcloud.
Jobs.update(
job=job,
status=JobStatus.RUNNING,
progress=10,
status_text="Migrating Nextcloud.",
)
Nextcloud().stop()
move_folder(
data_path=pathlib.Path("/var/lib/nextcloud"),
bind_path=pathlib.Path(f"/volumes/{config.nextcloud_block_device}/nextcloud"),
user="nextcloud",
group="nextcloud",
)
# Start Nextcloud
Nextcloud().start()
# Perform migration of Bitwarden
Jobs.update(
job=job,
status=JobStatus.RUNNING,
progress=28,
status_text="Migrating Bitwarden.",
)
Bitwarden().stop()
move_folder(
data_path=pathlib.Path("/var/lib/bitwarden"),
bind_path=pathlib.Path(f"/volumes/{config.bitwarden_block_device}/bitwarden"),
user="vaultwarden",
group="vaultwarden",
)
move_folder(
data_path=pathlib.Path("/var/lib/bitwarden_rs"),
bind_path=pathlib.Path(
f"/volumes/{config.bitwarden_block_device}/bitwarden_rs"
),
user="vaultwarden",
group="vaultwarden",
)
# Start Bitwarden
Bitwarden().start()
# Perform migration of Gitea
Jobs.update(
job=job,
status=JobStatus.RUNNING,
progress=46,
status_text="Migrating Gitea.",
)
Gitea().stop()
move_folder(
data_path=pathlib.Path("/var/lib/gitea"),
bind_path=pathlib.Path(f"/volumes/{config.gitea_block_device}/gitea"),
user="gitea",
group="gitea",
)
Gitea().start()
# Perform migration of Mail server
Jobs.update(
job=job,
status=JobStatus.RUNNING,
progress=64,
status_text="Migrating Mail server.",
)
MailServer().stop()
move_folder(
data_path=pathlib.Path("/var/vmail"),
bind_path=pathlib.Path(f"/volumes/{config.email_block_device}/vmail"),
user="virtualMail",
group="virtualMail",
)
move_folder(
data_path=pathlib.Path("/var/sieve"),
bind_path=pathlib.Path(f"/volumes/{config.email_block_device}/sieve"),
user="virtualMail",
group="virtualMail",
)
MailServer().start()
# Perform migration of Pleroma
Jobs.update(
job=job,
status=JobStatus.RUNNING,
progress=82,
status_text="Migrating Pleroma.",
)
Pleroma().stop()
move_folder(
data_path=pathlib.Path("/var/lib/pleroma"),
bind_path=pathlib.Path(f"/volumes/{config.pleroma_block_device}/pleroma"),
user="pleroma",
group="pleroma",
)
move_folder(
data_path=pathlib.Path("/var/lib/postgresql"),
bind_path=pathlib.Path(f"/volumes/{config.pleroma_block_device}/postgresql"),
user="postgres",
group="postgres",
)
Pleroma().start()
Jobs.update(
job=job,
status=JobStatus.FINISHED,
progress=100,
status_text="Migration finished.",
result="Migration finished.",
)
def start_bind_migration(config: BindMigrationConfig) -> Job:
"""Start migration."""
job = Jobs.add(
type_id="migrations.migrate_to_binds",
name="Migrate to binds",
description="Migration required to use the new disk space management.",
)
migrate_to_binds(config, job)
return job
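A sketch of how the migration entry point above is expected to be called, for example from a mutation handler. The block device names are placeholders; real names come from BlockDevices and must already be mounted under /volumes/<name>.

from selfprivacy_api.jobs.migrate_to_binds import (
    BindMigrationConfig,
    start_bind_migration,
)

config = BindMigrationConfig(
    email_block_device="sdb",  # placeholder device names
    bitwarden_block_device="sdb",
    gitea_block_device="sdb",
    nextcloud_block_device="sdb",
    pleroma_block_device="sdb",
)

# Returns immediately with the tracking Job; the heavy lifting runs in the
# huey task, which updates the job's status, progress, and status_text.
job = start_bind_migration(config)
print(job.uid, job.status)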

View file

@@ -0,0 +1,57 @@
import time
from selfprivacy_api.utils.huey import huey
from selfprivacy_api.jobs import JobStatus, Jobs
@huey.task()
def test_job():
job = Jobs.get_instance().add(
type_id="test",
name="Test job",
description="This is a test job.",
status=JobStatus.CREATED,
status_text="",
progress=0,
)
time.sleep(5)
Jobs.get_instance().update(
job=job,
status=JobStatus.RUNNING,
status_text="Performing pre-move checks...",
progress=5,
)
time.sleep(5)
Jobs.get_instance().update(
job=job,
status=JobStatus.RUNNING,
status_text="Performing pre-move checks...",
progress=10,
)
time.sleep(5)
Jobs.get_instance().update(
job=job,
status=JobStatus.RUNNING,
status_text="Performing pre-move checks...",
progress=15,
)
time.sleep(5)
Jobs.get_instance().update(
job=job,
status=JobStatus.RUNNING,
status_text="Performing pre-move checks...",
progress=20,
)
time.sleep(5)
Jobs.get_instance().update(
job=job,
status=JobStatus.RUNNING,
status_text="Performing pre-move checks...",
progress=25,
)
time.sleep(5)
Jobs.get_instance().update(
job=job,
status=JobStatus.FINISHED,
status_text="Job finished.",
progress=100,
)

View file

@@ -1,14 +0,0 @@
#!/usr/bin/env python3
"""API authentication module"""
from flask import Blueprint
from flask_restful import Api
auth = Blueprint("auth", __name__, url_prefix="/auth")
api = Api(auth)
from . import (
new_device,
recovery_token,
app_tokens,
)

View file

@@ -1,118 +0,0 @@
#!/usr/bin/env python3
"""App tokens management module"""
from flask import request
from flask_restful import Resource, reqparse
from selfprivacy_api.resources.api_auth import api
from selfprivacy_api.utils.auth import (
delete_token,
get_tokens_info,
is_token_name_exists,
is_token_name_pair_valid,
refresh_token,
get_token_name,
)
class Tokens(Resource):
"""Token management class
GET returns the list of active devices.
DELETE invalidates token unless it is the last one or the caller uses this token.
POST refreshes the token of the caller.
"""
def get(self):
"""
Get current device tokens
---
tags:
- Tokens
security:
- bearerAuth: []
responses:
200:
description: List of tokens
400:
description: Bad request
"""
caller_name = get_token_name(request.headers.get("Authorization").split(" ")[1])
tokens = get_tokens_info()
# Return a list of tokens and if it is the caller's token
# it will be marked with a flag
return [
{
"name": token["name"],
"date": token["date"],
"is_caller": token["name"] == caller_name,
}
for token in tokens
]
def delete(self):
"""
Delete token
---
tags:
- Tokens
security:
- bearerAuth: []
parameters:
- in: body
name: token
required: true
description: Token's name to delete
schema:
type: object
properties:
token_name:
type: string
description: Token name to delete
required: true
responses:
200:
description: Token deleted
400:
description: Bad request
404:
description: Token not found
"""
parser = reqparse.RequestParser()
parser.add_argument(
"token_name", type=str, required=True, help="Token to delete"
)
args = parser.parse_args()
token_name = args["token_name"]
if is_token_name_pair_valid(
token_name, request.headers.get("Authorization").split(" ")[1]
):
return {"message": "Cannot delete caller's token"}, 400
if not is_token_name_exists(token_name):
return {"message": "Token not found"}, 404
delete_token(token_name)
return {"message": "Token deleted"}, 200
def post(self):
"""
Refresh token
---
tags:
- Tokens
security:
- bearerAuth: []
responses:
200:
description: Token refreshed
400:
description: Bad request
404:
description: Token not found
"""
# Get token from header
token = request.headers.get("Authorization").split(" ")[1]
new_token = refresh_token(token)
if new_token is None:
return {"message": "Token not found"}, 404
return {"token": new_token}, 200
api.add_resource(Tokens, "/tokens")

View file

@@ -1,103 +0,0 @@
#!/usr/bin/env python3
"""New device auth module"""
from flask_restful import Resource, reqparse
from selfprivacy_api.resources.api_auth import api
from selfprivacy_api.utils.auth import (
get_new_device_auth_token,
use_new_device_auth_token,
delete_new_device_auth_token,
)
class NewDevice(Resource):
"""New device auth class
POST returns a new token for the caller.
"""
def post(self):
"""
Get new device token
---
tags:
- Tokens
security:
- bearerAuth: []
responses:
200:
description: New device token
400:
description: Bad request
"""
token = get_new_device_auth_token()
return {"token": token}
def delete(self):
"""
Delete new device token
---
tags:
- Tokens
security:
- bearerAuth: []
responses:
200:
description: New device token deleted
400:
description: Bad request
"""
delete_new_device_auth_token()
return {"token": None}
class AuthorizeDevice(Resource):
"""Authorize device class
POST authorizes the caller.
"""
def post(self):
"""
Authorize device
---
tags:
- Tokens
parameters:
- in: body
name: data
required: true
description: Who is authorizing
schema:
type: object
properties:
token:
type: string
description: Mnemonic token to authorize
device:
type: string
description: Device to authorize
responses:
200:
description: Device authorized
400:
description: Bad request
404:
description: Token not found
"""
parser = reqparse.RequestParser()
parser.add_argument(
"token", type=str, required=True, help="Mnemonic token to authorize"
)
parser.add_argument(
"device", type=str, required=True, help="Device to authorize"
)
args = parser.parse_args()
auth_token = args["token"]
device = args["device"]
token = use_new_device_auth_token(auth_token, device)
if token is None:
return {"message": "Token not found"}, 404
return {"message": "Device authorized", "token": token}, 200
api.add_resource(NewDevice, "/new_device")
api.add_resource(AuthorizeDevice, "/new_device/authorize")

View file

@@ -1,205 +0,0 @@
#!/usr/bin/env python3
"""Recovery token module"""
from datetime import datetime
from flask_restful import Resource, reqparse
from selfprivacy_api.resources.api_auth import api
from selfprivacy_api.utils import parse_date
from selfprivacy_api.utils.auth import (
is_recovery_token_exists,
is_recovery_token_valid,
get_recovery_token_status,
generate_recovery_token,
use_mnemonic_recoverery_token,
)
class RecoveryToken(Resource):
"""Recovery token class
GET returns the status of the recovery token.
POST generates a new recovery token.
"""
def get(self):
"""
Get recovery token status
---
tags:
- Tokens
security:
- bearerAuth: []
responses:
200:
description: Recovery token status
schema:
type: object
properties:
exists:
type: boolean
description: Recovery token exists
valid:
type: boolean
description: Recovery token is valid
date:
type: string
description: Recovery token date
expiration:
type: string
description: Recovery token expiration date
uses_left:
type: integer
description: Recovery token uses left
400:
description: Bad request
"""
if not is_recovery_token_exists():
return {
"exists": False,
"valid": False,
"date": None,
"expiration": None,
"uses_left": None,
}
status = get_recovery_token_status()
# check if status is None
if status is None:
return {
"exists": False,
"valid": False,
"date": None,
"expiration": None,
"uses_left": None,
}
if not is_recovery_token_valid():
return {
"exists": True,
"valid": False,
"date": status["date"],
"expiration": status["expiration"],
"uses_left": status["uses_left"],
}
return {
"exists": True,
"valid": True,
"date": status["date"],
"expiration": status["expiration"],
"uses_left": status["uses_left"],
}
def post(self):
"""
Generate recovery token
---
tags:
- Tokens
security:
- bearerAuth: []
parameters:
- in: body
name: data
required: true
description: Token data
schema:
type: object
properties:
expiration:
type: string
description: Token expiration date
uses:
type: integer
description: Token uses
responses:
200:
description: Recovery token generated
schema:
type: object
properties:
token:
type: string
description: Mnemonic recovery token
400:
description: Bad request
"""
parser = reqparse.RequestParser()
parser.add_argument(
"expiration", type=str, required=False, help="Token expiration date"
)
parser.add_argument("uses", type=int, required=False, help="Token uses")
args = parser.parse_args()
# Convert expiration date to datetime and return 400 if it is not valid
if args["expiration"]:
try:
expiration = parse_date(args["expiration"])
# Return 400 if expiration date is in the past
if expiration < datetime.now():
return {"message": "Expiration date cannot be in the past"}, 400
except ValueError:
return {
"error": "Invalid expiration date. Use YYYY-MM-DDTHH:MM:SS.SSS"
}, 400
else:
expiration = None
if args["uses"] is not None and args["uses"] < 1:
return {"message": "Uses must be greater than 0"}, 400
# Generate recovery token
token = generate_recovery_token(expiration, args["uses"])
return {"token": token}
class UseRecoveryToken(Resource):
"""Use recovery token class
POST uses the recovery token.
"""
def post(self):
"""
Use recovery token
---
tags:
- Tokens
parameters:
- in: body
name: data
required: true
description: Token data
schema:
type: object
properties:
token:
type: string
description: Mnemonic recovery token
device:
type: string
description: Device to authorize
responses:
200:
description: Recovery token used
schema:
type: object
properties:
token:
type: string
description: Device authorization token
400:
description: Bad request
404:
description: Token not found
"""
parser = reqparse.RequestParser()
parser.add_argument(
"token", type=str, required=True, help="Mnemonic recovery token"
)
parser.add_argument(
"device", type=str, required=True, help="Device to authorize"
)
args = parser.parse_args()
# Use recovery token
token = use_mnemonic_recoverery_token(args["token"], args["device"])
if token is None:
return {"error": "Token not found"}, 404
return {"token": token}
api.add_resource(RecoveryToken, "/recovery_token")
api.add_resource(UseRecoveryToken, "/recovery_token/use")

View file

@@ -1,27 +0,0 @@
#!/usr/bin/env python3
"""Unassigned views"""
from flask_restful import Resource
from selfprivacy_api.graphql.queries.api_queries import get_api_version
class ApiVersion(Resource):
"""SelfPrivacy API version"""
def get(self):
"""Get API version
---
tags:
- System
responses:
200:
description: API version
schema:
type: object
properties:
version:
type: string
description: API version
401:
description: Unauthorized
"""
return {"version": get_api_version()}

View file

@@ -1,19 +0,0 @@
#!/usr/bin/env python3
"""Services management module"""
from flask import Blueprint
from flask_restful import Api
services = Blueprint("services", __name__, url_prefix="/services")
api = Api(services)
from . import (
bitwarden,
gitea,
mailserver,
main,
nextcloud,
ocserv,
pleroma,
restic,
ssh,
)

View file

@@ -1,66 +0,0 @@
#!/usr/bin/env python3
"""Bitwarden management module"""
from flask_restful import Resource
from selfprivacy_api.resources.services import api
from selfprivacy_api.utils import WriteUserData
class EnableBitwarden(Resource):
"""Enable Bitwarden"""
def post(self):
"""
Enable Bitwarden
---
tags:
- Bitwarden
security:
- bearerAuth: []
responses:
200:
description: Bitwarden enabled
401:
description: Unauthorized
"""
with WriteUserData() as data:
if "bitwarden" not in data:
data["bitwarden"] = {}
data["bitwarden"]["enable"] = True
return {
"status": 0,
"message": "Bitwarden enabled",
}
class DisableBitwarden(Resource):
"""Disable Bitwarden"""
def post(self):
"""
Disable Bitwarden
---
tags:
- Bitwarden
security:
- bearerAuth: []
responses:
200:
description: Bitwarden disabled
401:
description: Unauthorized
"""
with WriteUserData() as data:
if "bitwarden" not in data:
data["bitwarden"] = {}
data["bitwarden"]["enable"] = False
return {
"status": 0,
"message": "Bitwarden disabled",
}
api.add_resource(EnableBitwarden, "/bitwarden/enable")
api.add_resource(DisableBitwarden, "/bitwarden/disable")

View file

@@ -1,66 +0,0 @@
#!/usr/bin/env python3
"""Gitea management module"""
from flask_restful import Resource
from selfprivacy_api.resources.services import api
from selfprivacy_api.utils import WriteUserData
class EnableGitea(Resource):
"""Enable Gitea"""
def post(self):
"""
Enable Gitea
---
tags:
- Gitea
security:
- bearerAuth: []
responses:
200:
description: Gitea enabled
401:
description: Unauthorized
"""
with WriteUserData() as data:
if "gitea" not in data:
data["gitea"] = {}
data["gitea"]["enable"] = True
return {
"status": 0,
"message": "Gitea enabled",
}
class DisableGitea(Resource):
"""Disable Gitea"""
def post(self):
"""
Disable Gitea
---
tags:
- Gitea
security:
- bearerAuth: []
responses:
200:
description: Gitea disabled
401:
description: Unauthorized
"""
with WriteUserData() as data:
if "gitea" not in data:
data["gitea"] = {}
data["gitea"]["enable"] = False
return {
"status": 0,
"message": "Gitea disabled",
}
api.add_resource(EnableGitea, "/gitea/enable")
api.add_resource(DisableGitea, "/gitea/disable")

View file

@@ -1,41 +0,0 @@
#!/usr/bin/env python3
"""Mail server management module"""
import base64
import subprocess
import os
from flask_restful import Resource
from selfprivacy_api.resources.services import api
from selfprivacy_api.utils import get_dkim_key, get_domain
class DKIMKey(Resource):
"""Get DKIM key from file"""
def get(self):
"""
Get DKIM key from file
---
tags:
- Email
security:
- bearerAuth: []
responses:
200:
description: DKIM key encoded in base64
401:
description: Unauthorized
404:
description: DKIM key not found
"""
domain = get_domain()
dkim = get_dkim_key(domain)
if dkim is None:
return "DKIM file not found", 404
dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8")
return dkim
api.add_resource(DKIMKey, "/mailserver/dkim")

View file

@@ -1,84 +0,0 @@
#!/usr/bin/env python3
"""Services status module"""
import subprocess
from flask_restful import Resource
from . import api
class ServiceStatus(Resource):
"""Get service status"""
def get(self):
"""
Get service status
---
tags:
- Services
responses:
200:
description: Service status
schema:
type: object
properties:
imap:
type: integer
description: Dovecot service status
smtp:
type: integer
description: Postfix service status
http:
type: integer
description: Nginx service status
bitwarden:
type: integer
description: Bitwarden service status
gitea:
type: integer
description: Gitea service status
nextcloud:
type: integer
description: Nextcloud service status
ocserv:
type: integer
description: OpenConnect VPN service status
pleroma:
type: integer
description: Pleroma service status
401:
description: Unauthorized
"""
imap_service = subprocess.Popen(["systemctl", "status", "dovecot2.service"])
imap_service.communicate()[0]
smtp_service = subprocess.Popen(["systemctl", "status", "postfix.service"])
smtp_service.communicate()[0]
http_service = subprocess.Popen(["systemctl", "status", "nginx.service"])
http_service.communicate()[0]
bitwarden_service = subprocess.Popen(
["systemctl", "status", "vaultwarden.service"]
)
bitwarden_service.communicate()[0]
gitea_service = subprocess.Popen(["systemctl", "status", "gitea.service"])
gitea_service.communicate()[0]
nextcloud_service = subprocess.Popen(
["systemctl", "status", "phpfpm-nextcloud.service"]
)
nextcloud_service.communicate()[0]
ocserv_service = subprocess.Popen(["systemctl", "status", "ocserv.service"])
ocserv_service.communicate()[0]
pleroma_service = subprocess.Popen(["systemctl", "status", "pleroma.service"])
pleroma_service.communicate()[0]
return {
"imap": imap_service.returncode,
"smtp": smtp_service.returncode,
"http": http_service.returncode,
"bitwarden": bitwarden_service.returncode,
"gitea": gitea_service.returncode,
"nextcloud": nextcloud_service.returncode,
"ocserv": ocserv_service.returncode,
"pleroma": pleroma_service.returncode,
}
api.add_resource(ServiceStatus, "/status")

View file

@@ -1,66 +0,0 @@
#!/usr/bin/env python3
"""Nextcloud management module"""
from flask_restful import Resource
from selfprivacy_api.resources.services import api
from selfprivacy_api.utils import WriteUserData
class EnableNextcloud(Resource):
"""Enable Nextcloud"""
def post(self):
"""
Enable Nextcloud
---
tags:
- Nextcloud
security:
- bearerAuth: []
responses:
200:
description: Nextcloud enabled
401:
description: Unauthorized
"""
with WriteUserData() as data:
if "nextcloud" not in data:
data["nextcloud"] = {}
data["nextcloud"]["enable"] = True
return {
"status": 0,
"message": "Nextcloud enabled",
}
class DisableNextcloud(Resource):
"""Disable Nextcloud"""
def post(self):
"""
Disable Nextcloud
---
tags:
- Nextcloud
security:
- bearerAuth: []
responses:
200:
description: Nextcloud disabled
401:
description: Unauthorized
"""
with WriteUserData() as data:
if "nextcloud" not in data:
data["nextcloud"] = {}
data["nextcloud"]["enable"] = False
return {
"status": 0,
"message": "Nextcloud disabled",
}
api.add_resource(EnableNextcloud, "/nextcloud/enable")
api.add_resource(DisableNextcloud, "/nextcloud/disable")

View file

@@ -1,66 +0,0 @@
#!/usr/bin/env python3
"""OpenConnect VPN server management module"""
from flask_restful import Resource
from selfprivacy_api.resources.services import api
from selfprivacy_api.utils import WriteUserData
class EnableOcserv(Resource):
"""Enable OpenConnect VPN server"""
def post(self):
"""
Enable OCserv
---
tags:
- OCserv
security:
- bearerAuth: []
responses:
200:
description: OCserv enabled
401:
description: Unauthorized
"""
with WriteUserData() as data:
if "ocserv" not in data:
data["ocserv"] = {}
data["ocserv"]["enable"] = True
return {
"status": 0,
"message": "OpenConnect VPN server enabled",
}
class DisableOcserv(Resource):
"""Disable OpenConnect VPN server"""
def post(self):
"""
Disable OCserv
---
tags:
- OCserv
security:
- bearerAuth: []
responses:
200:
description: OCserv disabled
401:
description: Unauthorized
"""
with WriteUserData() as data:
if "ocserv" not in data:
data["ocserv"] = {}
data["ocserv"]["enable"] = False
return {
"status": 0,
"message": "OpenConnect VPN server disabled",
}
api.add_resource(EnableOcserv, "/ocserv/enable")
api.add_resource(DisableOcserv, "/ocserv/disable")

View file

@@ -1,66 +0,0 @@
#!/usr/bin/env python3
"""Pleroma management module"""
from flask_restful import Resource
from selfprivacy_api.resources.services import api
from selfprivacy_api.utils import WriteUserData
class EnablePleroma(Resource):
"""Enable Pleroma"""
def post(self):
"""
Enable Pleroma
---
tags:
- Pleroma
security:
- bearerAuth: []
responses:
200:
description: Pleroma enabled
401:
description: Unauthorized
"""
with WriteUserData() as data:
if "pleroma" not in data:
data["pleroma"] = {}
data["pleroma"]["enable"] = True
return {
"status": 0,
"message": "Pleroma enabled",
}
class DisablePleroma(Resource):
"""Disable Pleroma"""
def post(self):
"""
Disable Pleroma
---
tags:
- Pleroma
security:
- bearerAuth: []
responses:
200:
description: Pleroma disabled
401:
description: Unauthorized
"""
with WriteUserData() as data:
if "pleroma" not in data:
data["pleroma"] = {}
data["pleroma"]["enable"] = False
return {
"status": 0,
"message": "Pleroma disabled",
}
api.add_resource(EnablePleroma, "/pleroma/enable")
api.add_resource(DisablePleroma, "/pleroma/disable")

View file

@@ -1,241 +0,0 @@
#!/usr/bin/env python3
"""Backups management module"""
from flask_restful import Resource, reqparse
from selfprivacy_api.resources.services import api
from selfprivacy_api.utils import WriteUserData
from selfprivacy_api.restic_controller import tasks as restic_tasks
from selfprivacy_api.restic_controller import ResticController, ResticStates
class ListAllBackups(Resource):
"""List all restic backups"""
def get(self):
"""
Get all restic backups
---
tags:
- Backups
security:
- bearerAuth: []
responses:
200:
description: A list of snapshots
400:
description: Bad request
401:
description: Unauthorized
"""
restic = ResticController()
return restic.snapshot_list
class AsyncCreateBackup(Resource):
"""Create a new restic backup"""
def put(self):
"""
Initiate a new restic backup
---
tags:
- Backups
security:
- bearerAuth: []
responses:
200:
description: Backup creation has started
400:
description: Bad request
401:
description: Unauthorized
409:
description: Backup already in progress
"""
restic = ResticController()
if restic.state is ResticStates.NO_KEY:
return {"error": "No key provided"}, 400
if restic.state is ResticStates.INITIALIZING:
return {"error": "Backup is initializing"}, 400
if restic.state is ResticStates.BACKING_UP:
return {"error": "Backup is already running"}, 409
restic_tasks.start_backup()
return {
"status": 0,
"message": "Backup creation has started",
}
class CheckBackupStatus(Resource):
"""Check current backup status"""
def get(self):
"""
Get backup status
---
tags:
- Backups
security:
- bearerAuth: []
responses:
200:
description: Backup status
400:
description: Bad request
401:
description: Unauthorized
"""
restic = ResticController()
return {
"status": restic.state.name,
"progress": restic.progress,
"error_message": restic.error_message,
}
class ForceReloadSnapshots(Resource):
"""Force reload snapshots"""
def get(self):
"""
Force reload snapshots
---
tags:
- Backups
security:
- bearerAuth: []
responses:
200:
description: Snapshots reloaded
400:
description: Bad request
401:
description: Unauthorized
"""
restic_tasks.load_snapshots()
return {
"status": 0,
"message": "Snapshots reload started",
}
class AsyncRestoreBackup(Resource):
"""Trigger backup restoration process"""
def put(self):
"""
Start backup restoration
---
tags:
- Backups
security:
- bearerAuth: []
parameters:
- in: body
required: true
name: backup
description: Backup to restore
schema:
type: object
required:
- backupId
properties:
backupId:
type: string
responses:
200:
description: Backup restoration process started
400:
description: Bad request
401:
description: Unauthorized
"""
parser = reqparse.RequestParser()
parser.add_argument("backupId", type=str, required=True)
args = parser.parse_args()
restic = ResticController()
if restic.state is ResticStates.NO_KEY:
return {"error": "No key provided"}, 400
if restic.state is ResticStates.NOT_INITIALIZED:
return {"error": "Repository is not initialized"}, 400
if restic.state is ResticStates.BACKING_UP:
return {"error": "Backup is already running"}, 409
if restic.state is ResticStates.INITIALIZING:
return {"error": "Repository is initializing"}, 400
if restic.state is ResticStates.RESTORING:
return {"error": "Restore is already running"}, 409
for backup in restic.snapshot_list:
if backup["short_id"] == args["backupId"]:
restic_tasks.restore_from_backup(args["backupId"])
return {
"status": 0,
"message": "Backup restoration procedure started",
}
return {"error": "Backup not found"}, 404
class BackblazeConfig(Resource):
"""Backblaze config"""
def put(self):
"""
Set the new key for backblaze
---
tags:
- Backups
security:
- bearerAuth: []
parameters:
- in: body
required: true
name: backblazeSettings
description: New Backblaze settings
schema:
type: object
required:
- accountId
- accountKey
- bucket
properties:
accountId:
type: string
accountKey:
type: string
bucket:
type: string
responses:
200:
description: New Backblaze settings
400:
description: Bad request
401:
description: Unauthorized
"""
parser = reqparse.RequestParser()
parser.add_argument("accountId", type=str, required=True)
parser.add_argument("accountKey", type=str, required=True)
parser.add_argument("bucket", type=str, required=True)
args = parser.parse_args()
with WriteUserData() as data:
if "backblaze" not in data:
data["backblaze"] = {}
data["backblaze"]["accountId"] = args["accountId"]
data["backblaze"]["accountKey"] = args["accountKey"]
data["backblaze"]["bucket"] = args["bucket"]
restic_tasks.update_keys_from_userdata()
return "New Backblaze settings saved"
api.add_resource(ListAllBackups, "/restic/backup/list")
api.add_resource(AsyncCreateBackup, "/restic/backup/create")
api.add_resource(CheckBackupStatus, "/restic/backup/status")
api.add_resource(AsyncRestoreBackup, "/restic/backup/restore")
api.add_resource(BackblazeConfig, "/restic/backblaze/config")
api.add_resource(ForceReloadSnapshots, "/restic/backup/reload")

View file

@@ -1,407 +0,0 @@
#!/usr/bin/env python3
"""SSH management module"""
from flask_restful import Resource, reqparse
from selfprivacy_api.resources.services import api
from selfprivacy_api.utils import WriteUserData, ReadUserData, validate_ssh_public_key
class EnableSSH(Resource):
"""Enable SSH"""
def post(self):
"""
Enable SSH
---
tags:
- SSH
security:
- bearerAuth: []
responses:
200:
description: SSH enabled
401:
description: Unauthorized
"""
with WriteUserData() as data:
if "ssh" not in data:
data["ssh"] = {}
data["ssh"]["enable"] = True
return {
"status": 0,
"message": "SSH enabled",
}
class SSHSettings(Resource):
"""Enable/disable SSH"""
def get(self):
"""
Get current SSH settings
---
tags:
- SSH
security:
- bearerAuth: []
responses:
200:
description: SSH settings
400:
description: Bad request
"""
with ReadUserData() as data:
if "ssh" not in data:
return {"enable": True, "passwordAuthentication": True}
if "enable" not in data["ssh"]:
data["ssh"]["enable"] = True
if "passwordAuthentication" not in data["ssh"]:
data["ssh"]["passwordAuthentication"] = True
return {
"enable": data["ssh"]["enable"],
"passwordAuthentication": data["ssh"]["passwordAuthentication"],
}
def put(self):
"""
Change SSH settings
---
tags:
- SSH
security:
- bearerAuth: []
parameters:
- name: sshSettings
in: body
required: true
description: SSH settings
schema:
type: object
required:
- enable
- passwordAuthentication
properties:
enable:
type: boolean
passwordAuthentication:
type: boolean
responses:
200:
description: New settings saved
400:
description: Bad request
"""
parser = reqparse.RequestParser()
parser.add_argument("enable", type=bool, required=False)
parser.add_argument("passwordAuthentication", type=bool, required=False)
args = parser.parse_args()
enable = args["enable"]
password_authentication = args["passwordAuthentication"]
with WriteUserData() as data:
if "ssh" not in data:
data["ssh"] = {}
if enable is not None:
data["ssh"]["enable"] = enable
if password_authentication is not None:
data["ssh"]["passwordAuthentication"] = password_authentication
return "SSH settings changed"
class WriteSSHKey(Resource):
"""Write new SSH key"""
def put(self):
"""
Add an SSH root key
---
consumes:
- application/json
tags:
- SSH
security:
- bearerAuth: []
parameters:
- in: body
name: body
required: true
description: Public key to add
schema:
type: object
required:
- public_key
properties:
public_key:
type: string
description: ssh-ed25519 public key.
responses:
201:
description: Key added
400:
description: Bad request
401:
description: Unauthorized
409:
description: Key already exists
"""
parser = reqparse.RequestParser()
parser.add_argument(
"public_key", type=str, required=True, help="Key cannot be blank!"
)
args = parser.parse_args()
public_key = args["public_key"]
if not validate_ssh_public_key(public_key):
return {
"error": "Invalid key type. Only ssh-ed25519 and ssh-rsa are supported.",
}, 400
with WriteUserData() as data:
if "ssh" not in data:
data["ssh"] = {}
if "rootKeys" not in data["ssh"]:
data["ssh"]["rootKeys"] = []
# Return 409 if key already in array
for key in data["ssh"]["rootKeys"]:
if key == public_key:
return {
"error": "Key already exists",
}, 409
data["ssh"]["rootKeys"].append(public_key)
return {
"status": 0,
"message": "New SSH key successfully written",
}, 201
class SSHKeys(Resource):
"""List SSH keys"""
def get(self, username):
"""
List SSH keys
---
tags:
- SSH
security:
- bearerAuth: []
parameters:
- in: path
name: username
type: string
required: true
description: User to list keys for
responses:
200:
description: SSH keys
401:
description: Unauthorized
"""
with ReadUserData() as data:
if username == "root":
if "ssh" not in data:
data["ssh"] = {}
if "rootKeys" not in data["ssh"]:
data["ssh"]["rootKeys"] = []
return data["ssh"]["rootKeys"]
if username == data["username"]:
if "sshKeys" not in data:
data["sshKeys"] = []
return data["sshKeys"]
if "users" not in data:
data["users"] = []
for user in data["users"]:
if user["username"] == username:
if "sshKeys" not in user:
user["sshKeys"] = []
return user["sshKeys"]
return {
"error": "User not found",
}, 404
def post(self, username):
"""
Add SSH key to the user
---
tags:
- SSH
security:
- bearerAuth: []
parameters:
- in: body
required: true
name: public_key
schema:
type: object
required:
- public_key
properties:
public_key:
type: string
- in: path
name: username
type: string
required: true
description: User to add keys for
responses:
201:
description: SSH key added
401:
description: Unauthorized
404:
description: User not found
409:
description: Key already exists
"""
parser = reqparse.RequestParser()
parser.add_argument(
"public_key", type=str, required=True, help="Key cannot be blank!"
)
args = parser.parse_args()
if username == "root":
return {
"error": "Use /ssh/key/send to add root keys",
}, 400
if not validate_ssh_public_key(args["public_key"]):
return {
"error": "Invalid key type. Only ssh-ed25519 and ssh-rsa are supported.",
}, 400
with WriteUserData() as data:
if username == data["username"]:
if "sshKeys" not in data:
data["sshKeys"] = []
# Return 409 if key already in array
for key in data["sshKeys"]:
if key == args["public_key"]:
return {
"error": "Key already exists",
}, 409
data["sshKeys"].append(args["public_key"])
return {
"message": "New SSH key successfully written",
}, 201
if "users" not in data:
data["users"] = []
for user in data["users"]:
if user["username"] == username:
if "sshKeys" not in user:
user["sshKeys"] = []
# Return 409 if key already in array
for key in user["sshKeys"]:
if key == args["public_key"]:
return {
"error": "Key already exists",
}, 409
user["sshKeys"].append(args["public_key"])
return {
"message": "New SSH key successfully written",
}, 201
return {
"error": "User not found",
}, 404
def delete(self, username):
"""
Delete SSH key
---
tags:
- SSH
security:
- bearerAuth: []
parameters:
- in: body
name: public_key
required: true
description: Key to delete
schema:
type: object
required:
- public_key
properties:
public_key:
type: string
- in: path
name: username
type: string
required: true
description: User to delete keys for
responses:
200:
description: SSH key deleted
401:
description: Unauthorized
404:
description: Key not found
"""
parser = reqparse.RequestParser()
parser.add_argument(
"public_key", type=str, required=True, help="Key cannot be blank!"
)
args = parser.parse_args()
with WriteUserData() as data:
if username == "root":
if "ssh" not in data:
data["ssh"] = {}
if "rootKeys" not in data["ssh"]:
data["ssh"]["rootKeys"] = []
# Return 404 if key not in array
for key in data["ssh"]["rootKeys"]:
if key == args["public_key"]:
data["ssh"]["rootKeys"].remove(key)
# If rootKeys became zero length, delete it
if len(data["ssh"]["rootKeys"]) == 0:
del data["ssh"]["rootKeys"]
return {
"message": "SSH key deleted",
}, 200
return {
"error": "Key not found",
}, 404
if username == data["username"]:
if "sshKeys" not in data:
data["sshKeys"] = []
# Return 404 if key not in array
for key in data["sshKeys"]:
if key == args["public_key"]:
data["sshKeys"].remove(key)
return {
"message": "SSH key deleted",
}, 200
return {
"error": "Key not found",
}, 404
if "users" not in data:
data["users"] = []
for user in data["users"]:
if user["username"] == username:
if "sshKeys" not in user:
user["sshKeys"] = []
# Return 404 if key not in array
for key in user["sshKeys"]:
if key == args["public_key"]:
user["sshKeys"].remove(key)
return {
"message": "SSH key successfully deleted",
}, 200
return {
"error": "Key not found",
}, 404
return {
"error": "User not found",
}, 404
api.add_resource(EnableSSH, "/ssh/enable")
api.add_resource(SSHSettings, "/ssh")
api.add_resource(WriteSSHKey, "/ssh/key/send")
api.add_resource(SSHKeys, "/ssh/keys/<string:username>")

View file

@@ -1,346 +0,0 @@
#!/usr/bin/env python3
"""System management module"""
import os
import subprocess
import pytz
from flask import Blueprint
from flask_restful import Resource, Api, reqparse
from selfprivacy_api.graphql.queries.system import (
get_python_version,
get_system_version,
)
from selfprivacy_api.utils import WriteUserData, ReadUserData
api_system = Blueprint("system", __name__, url_prefix="/system")
api = Api(api_system)
class Timezone(Resource):
"""Change timezone of NixOS"""
def get(self):
"""
Get current system timezone
---
tags:
- System
security:
- bearerAuth: []
responses:
200:
description: Timezone
400:
description: Bad request
"""
with ReadUserData() as data:
if "timezone" not in data:
return "Europe/Uzhgorod"
return data["timezone"]
def put(self):
"""
Change system timezone
---
tags:
- System
security:
- bearerAuth: []
parameters:
- name: timezone
in: body
required: true
description: Timezone to set
schema:
type: object
required:
- timezone
properties:
timezone:
type: string
responses:
200:
description: Timezone changed
400:
description: Bad request
"""
parser = reqparse.RequestParser()
parser.add_argument("timezone", type=str, required=True)
timezone = parser.parse_args()["timezone"]
# Check if timezone is a valid tzdata string
if timezone not in pytz.all_timezones:
return {"error": "Invalid timezone"}, 400
with WriteUserData() as data:
data["timezone"] = timezone
return "Timezone changed"
class AutoUpgrade(Resource):
"""Enable/disable automatic upgrades and reboots"""
def get(self):
"""
Get current system autoupgrade settings
---
tags:
- System
security:
- bearerAuth: []
responses:
200:
description: Auto-upgrade settings
400:
description: Bad request
"""
with ReadUserData() as data:
if "autoUpgrade" not in data:
return {"enable": True, "allowReboot": False}
if "enable" not in data["autoUpgrade"]:
data["autoUpgrade"]["enable"] = True
if "allowReboot" not in data["autoUpgrade"]:
data["autoUpgrade"]["allowReboot"] = False
return data["autoUpgrade"]
def put(self):
"""
Change system auto upgrade settings
---
tags:
- System
security:
- bearerAuth: []
parameters:
- name: autoUpgrade
in: body
required: true
description: Auto upgrade settings
schema:
type: object
required:
- enable
- allowReboot
properties:
enable:
type: boolean
allowReboot:
type: boolean
responses:
200:
description: New settings saved
400:
description: Bad request
"""
parser = reqparse.RequestParser()
parser.add_argument("enable", type=bool, required=False)
parser.add_argument("allowReboot", type=bool, required=False)
args = parser.parse_args()
enable = args["enable"]
allow_reboot = args["allowReboot"]
with WriteUserData() as data:
if "autoUpgrade" not in data:
data["autoUpgrade"] = {}
if enable is not None:
data["autoUpgrade"]["enable"] = enable
if allow_reboot is not None:
data["autoUpgrade"]["allowReboot"] = allow_reboot
return "Auto-upgrade settings changed"
class RebuildSystem(Resource):
"""Rebuild NixOS"""
def get(self):
"""
Rebuild NixOS with nixos-rebuild switch
---
tags:
- System
security:
- bearerAuth: []
responses:
200:
description: System rebuild has started
401:
description: Unauthorized
"""
rebuild_result = subprocess.Popen(
["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True
)
rebuild_result.communicate()[0]
return rebuild_result.returncode
class RollbackSystem(Resource):
"""Rollback NixOS"""
def get(self):
"""
Rollback NixOS with nixos-rebuild switch --rollback
---
tags:
- System
security:
- bearerAuth: []
responses:
200:
description: System rollback has started
401:
description: Unauthorized
"""
rollback_result = subprocess.Popen(
["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True
)
rollback_result.communicate()[0]
return rollback_result.returncode
class UpgradeSystem(Resource):
"""Upgrade NixOS"""
def get(self):
"""
Upgrade NixOS with nixos-rebuild switch --upgrade
---
tags:
- System
security:
- bearerAuth: []
responses:
200:
description: System upgrade has started
401:
description: Unauthorized
"""
upgrade_result = subprocess.Popen(
["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True
)
upgrade_result.communicate()[0]
return upgrade_result.returncode
class RebootSystem(Resource):
"""Reboot the system"""
def get(self):
"""
Reboot the system
---
tags:
- System
security:
- bearerAuth: []
responses:
200:
description: System reboot has started
401:
description: Unauthorized
"""
subprocess.Popen(["reboot"], start_new_session=True)
return "System reboot has started"
class SystemVersion(Resource):
"""Get system version from uname"""
def get(self):
"""
Get system version from uname -a
---
tags:
- System
security:
- bearerAuth: []
responses:
200:
description: OK
401:
description: Unauthorized
"""
return {
"system_version": get_system_version(),
}
class PythonVersion(Resource):
"""Get python version"""
def get(self):
"""
Get python version used by this API
---
tags:
- System
security:
- bearerAuth: []
responses:
200:
description: OK
401:
description: Unauthorized
"""
return get_python_version()
class PullRepositoryChanges(Resource):
"""Pull NixOS config repository changes"""
def get(self):
"""
Pull Repository Changes
---
tags:
- System
security:
- bearerAuth: []
responses:
200:
description: Got update
201:
description: Nothing to update
401:
description: Unauthorized
500:
description: Something went wrong
"""
git_pull_command = ["git", "pull"]
current_working_directory = os.getcwd()
os.chdir("/etc/nixos")
git_pull_process_descriptor = subprocess.Popen(
git_pull_command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=False,
)
data = git_pull_process_descriptor.communicate()[0].decode("utf-8")
os.chdir(current_working_directory)
if git_pull_process_descriptor.returncode == 0:
return {
"status": 0,
"message": "Update completed successfully",
"data": data,
}
return {
"status": git_pull_process_descriptor.returncode,
"message": "Something went wrong",
"data": data,
}, 500
api.add_resource(Timezone, "/configuration/timezone")
api.add_resource(AutoUpgrade, "/configuration/autoUpgrade")
api.add_resource(RebuildSystem, "/configuration/apply")
api.add_resource(RollbackSystem, "/configuration/rollback")
api.add_resource(UpgradeSystem, "/configuration/upgrade")
api.add_resource(RebootSystem, "/reboot")
api.add_resource(SystemVersion, "/version")
api.add_resource(PythonVersion, "/pythonVersion")
api.add_resource(PullRepositoryChanges, "/configuration/pull")

View file

@ -1,162 +0,0 @@
#!/usr/bin/env python3
"""Users management module"""
import subprocess
import re
from flask_restful import Resource, reqparse
from selfprivacy_api.utils import WriteUserData, ReadUserData, is_username_forbidden
class Users(Resource):
"""Users management"""
def get(self):
"""
Get a list of users
---
tags:
- Users
security:
- bearerAuth: []
responses:
200:
description: A list of users
401:
description: Unauthorized
"""
parser = reqparse.RequestParser(bundle_errors=True)
parser.add_argument("withMainUser", type=bool, required=False)
args = parser.parse_args()
with_main_user = False if args["withMainUser"] is None else args["withMainUser"]
with ReadUserData() as data:
users = []
if with_main_user:
users.append(data["username"])
if "users" in data:
for user in data["users"]:
users.append(user["username"])
return users
def post(self):
"""
Create a new user
---
consumes:
- application/json
tags:
- Users
security:
- bearerAuth: []
parameters:
- in: body
name: user
required: true
description: User to create
schema:
type: object
required:
- username
- password
properties:
username:
type: string
description: Unix username. Must be alphanumeric and less than 32 characters
password:
type: string
description: Unix password.
responses:
201:
description: Created user
400:
description: Bad request
401:
description: Unauthorized
409:
description: User already exists
"""
parser = reqparse.RequestParser(bundle_errors=True)
parser.add_argument("username", type=str, required=True)
parser.add_argument("password", type=str, required=True)
args = parser.parse_args()
hashing_command = ["mkpasswd", "-m", "sha-512", args["password"]]
password_hash_process_descriptor = subprocess.Popen(
hashing_command,
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
hashed_password = password_hash_process_descriptor.communicate()[0]
hashed_password = hashed_password.decode("ascii")
hashed_password = hashed_password.rstrip()
# Check if username is forbidden
if is_username_forbidden(args["username"]):
return {"message": "Username is forbidden"}, 409
# Check if username passes the regex
if not re.match(r"^[a-z_][a-z0-9_]+$", args["username"]):
return {"error": "username must be alphanumeric"}, 400
# Check if username less than 32 characters
if len(args["username"]) >= 32:
return {"error": "username must be less than 32 characters"}, 400
with WriteUserData() as data:
if "users" not in data:
data["users"] = []
# Return 409 if user already exists
if data["username"] == args["username"]:
return {"error": "User already exists"}, 409
for user in data["users"]:
if user["username"] == args["username"]:
return {"error": "User already exists"}, 409
data["users"].append(
{
"username": args["username"],
"hashedPassword": hashed_password,
}
)
return {"result": 0, "username": args["username"]}, 201
class User(Resource):
"""Single user managment"""
def delete(self, username):
"""
Delete a user
---
tags:
- Users
security:
- bearerAuth: []
parameters:
- in: path
name: username
required: true
description: User to delete
type: string
responses:
200:
description: Deleted user
400:
description: Bad request
401:
description: Unauthorized
404:
description: User not found
"""
with WriteUserData() as data:
if username == data["username"]:
return {"error": "Cannot delete root user"}, 400
# Return 400 if user does not exist
for user in data["users"]:
if user["username"] == username:
data["users"].remove(user)
break
else:
return {"error": "User does not exist"}, 404
return {"result": 0, "username": username}

View file

View file

@ -0,0 +1,127 @@
from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from selfprivacy_api.actions.api_tokens import (
CannotDeleteCallerException,
InvalidExpirationDate,
InvalidUsesLeft,
NotFoundException,
delete_api_token,
get_api_recovery_token_status,
get_api_tokens_with_caller_flag,
get_new_api_recovery_key,
refresh_api_token,
)
from selfprivacy_api.dependencies import TokenHeader, get_token_header
from selfprivacy_api.utils.auth import (
delete_new_device_auth_token,
get_new_device_auth_token,
use_mnemonic_recoverery_token,
use_new_device_auth_token,
)
router = APIRouter(
prefix="/auth",
tags=["auth"],
responses={404: {"description": "Not found"}},
)
@router.get("/tokens")
async def rest_get_tokens(auth_token: TokenHeader = Depends(get_token_header)):
"""Get the tokens info"""
return get_api_tokens_with_caller_flag(auth_token.token)
class DeleteTokenInput(BaseModel):
"""Delete token input"""
token_name: str
@router.delete("/tokens")
async def rest_delete_tokens(
token: DeleteTokenInput, auth_token: TokenHeader = Depends(get_token_header)
):
"""Delete the tokens"""
try:
delete_api_token(auth_token.token, token.token_name)
except NotFoundException:
raise HTTPException(status_code=404, detail="Token not found")
except CannotDeleteCallerException:
raise HTTPException(status_code=400, detail="Cannot delete caller's token")
return {"message": "Token deleted"}
@router.post("/tokens")
async def rest_refresh_token(auth_token: TokenHeader = Depends(get_token_header)):
"""Refresh the token"""
try:
new_token = refresh_api_token(auth_token.token)
except NotFoundException:
raise HTTPException(status_code=404, detail="Token not found")
return {"token": new_token}
@router.get("/recovery_token")
async def rest_get_recovery_token_status(
auth_token: TokenHeader = Depends(get_token_header),
):
return get_api_recovery_token_status()
class CreateRecoveryTokenInput(BaseModel):
expiration: Optional[datetime] = None
uses: Optional[int] = None
@router.post("/recovery_token")
async def rest_create_recovery_token(
limits: CreateRecoveryTokenInput = CreateRecoveryTokenInput(),
auth_token: TokenHeader = Depends(get_token_header),
):
try:
token = get_new_api_recovery_key(limits.expiration, limits.uses)
except InvalidExpirationDate as e:
raise HTTPException(status_code=400, detail=str(e))
except InvalidUsesLeft as e:
raise HTTPException(status_code=400, detail=str(e))
return {"token": token}
class UseTokenInput(BaseModel):
token: str
device: str
@router.post("/recovery_token/use")
async def rest_use_recovery_token(input: UseTokenInput):
token = use_mnemonic_recoverery_token(input.token, input.device)
if token is None:
raise HTTPException(status_code=404, detail="Token not found")
return {"token": token}
@router.post("/new_device")
async def rest_new_device(auth_token: TokenHeader = Depends(get_token_header)):
token = get_new_device_auth_token()
return {"token": token}
@router.delete("/new_device")
async def rest_delete_new_device_token(
auth_token: TokenHeader = Depends(get_token_header),
):
delete_new_device_auth_token()
return {"token": None}
@router.post("/new_device/authorize")
async def rest_new_device_authorize(input: UseTokenInput):
token = use_new_device_auth_token(input.token, input.device)
if token is None:
raise HTTPException(status_code=404, detail="Token not found")
return {"message": "Device authorized", "token": token}

View file

@ -0,0 +1,373 @@
"""Basic services legacy api"""
import base64
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from selfprivacy_api.actions.ssh import (
InvalidPublicKey,
KeyAlreadyExists,
KeyNotFound,
create_ssh_key,
enable_ssh,
get_ssh_settings,
remove_ssh_key,
set_ssh_settings,
)
from selfprivacy_api.actions.users import UserNotFound, get_user_by_username
from selfprivacy_api.dependencies import get_token_header
from selfprivacy_api.restic_controller import ResticController, ResticStates
from selfprivacy_api.restic_controller import tasks as restic_tasks
from selfprivacy_api.services.bitwarden import Bitwarden
from selfprivacy_api.services.gitea import Gitea
from selfprivacy_api.services.mailserver import MailServer
from selfprivacy_api.services.nextcloud import Nextcloud
from selfprivacy_api.services.ocserv import Ocserv
from selfprivacy_api.services.pleroma import Pleroma
from selfprivacy_api.services.service import ServiceStatus
from selfprivacy_api.utils import WriteUserData, get_dkim_key, get_domain
router = APIRouter(
prefix="/services",
tags=["services"],
dependencies=[Depends(get_token_header)],
responses={404: {"description": "Not found"}},
)
def service_status_to_return_code(status: ServiceStatus):
"""Converts service status object to return code for
compatibility with legacy api"""
if status == ServiceStatus.ACTIVE:
return 0
elif status == ServiceStatus.FAILED:
return 1
elif status == ServiceStatus.INACTIVE:
return 3
elif status == ServiceStatus.OFF:
return 4
else:
return 2
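
As a quick reference, a sketch of the mapping that legacy clients are assumed to rely on, using the function and the ServiceStatus import above; any state other than the four listed falls through to 2.

# Illustration only: assumed expectations of the legacy status endpoint.
assert service_status_to_return_code(ServiceStatus.ACTIVE) == 0
assert service_status_to_return_code(ServiceStatus.FAILED) == 1
assert service_status_to_return_code(ServiceStatus.INACTIVE) == 3
assert service_status_to_return_code(ServiceStatus.OFF) == 4
assert service_status_to_return_code(ServiceStatus.ACTIVATING) == 2  # "everything else"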
@router.get("/status")
async def get_status():
"""Get the status of the services"""
mail_status = MailServer.get_status()
bitwarden_status = Bitwarden.get_status()
gitea_status = Gitea.get_status()
nextcloud_status = Nextcloud.get_status()
ocserv_status = Ocserv.get_status()
pleroma_status = Pleroma.get_status()
return {
"imap": service_status_to_return_code(mail_status),
"smtp": service_status_to_return_code(mail_status),
"http": 0,
"bitwarden": service_status_to_return_code(bitwarden_status),
"gitea": service_status_to_return_code(gitea_status),
"nextcloud": service_status_to_return_code(nextcloud_status),
"ocserv": service_status_to_return_code(ocserv_stauts),
"pleroma": service_status_to_return_code(pleroma_status),
}
@router.post("/bitwarden/enable")
async def enable_bitwarden():
"""Enable Bitwarden"""
Bitwarden.enable()
return {
"status": 0,
"message": "Bitwarden enabled",
}
@router.post("/bitwarden/disable")
async def disable_bitwarden():
"""Disable Bitwarden"""
Bitwarden.disable()
return {
"status": 0,
"message": "Bitwarden disabled",
}
@router.post("/gitea/enable")
async def enable_gitea():
"""Enable Gitea"""
Gitea.enable()
return {
"status": 0,
"message": "Gitea enabled",
}
@router.post("/gitea/disable")
async def disable_gitea():
"""Disable Gitea"""
Gitea.disable()
return {
"status": 0,
"message": "Gitea disabled",
}
@router.get("/mailserver/dkim")
async def get_mailserver_dkim():
"""Get the DKIM record for the mailserver"""
domain = get_domain()
dkim = get_dkim_key(domain)
if dkim is None:
raise HTTPException(status_code=404, detail="DKIM record not found")
dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8")
return dkim
@router.post("/nextcloud/enable")
async def enable_nextcloud():
"""Enable Nextcloud"""
Nextcloud.enable()
return {
"status": 0,
"message": "Nextcloud enabled",
}
@router.post("/nextcloud/disable")
async def disable_nextcloud():
"""Disable Nextcloud"""
Nextcloud.disable()
return {
"status": 0,
"message": "Nextcloud disabled",
}
@router.post("/ocserv/enable")
async def enable_ocserv():
"""Enable Ocserv"""
Ocserv.enable()
return {
"status": 0,
"message": "Ocserv enabled",
}
@router.post("/ocserv/disable")
async def disable_ocserv():
"""Disable Ocserv"""
Ocserv.disable()
return {
"status": 0,
"message": "Ocserv disabled",
}
@router.post("/pleroma/enable")
async def enable_pleroma():
"""Enable Pleroma"""
Pleroma.enable()
return {
"status": 0,
"message": "Pleroma enabled",
}
@router.post("/pleroma/disable")
async def disable_pleroma():
"""Disable Pleroma"""
Pleroma.disable()
return {
"status": 0,
"message": "Pleroma disabled",
}
@router.get("/restic/backup/list")
async def get_restic_backup_list():
restic = ResticController()
return restic.snapshot_list
@router.put("/restic/backup/create")
async def create_restic_backup():
restic = ResticController()
if restic.state is ResticStates.NO_KEY:
raise HTTPException(status_code=400, detail="Backup key not provided")
if restic.state is ResticStates.INITIALIZING:
raise HTTPException(status_code=400, detail="Backup is initializing")
if restic.state is ResticStates.BACKING_UP:
raise HTTPException(status_code=409, detail="Backup is already running")
restic_tasks.start_backup()
return {
"status": 0,
"message": "Backup creation has started",
}
@router.get("/restic/backup/status")
async def get_restic_backup_status():
restic = ResticController()
return {
"status": restic.state.name,
"progress": restic.progress,
"error_message": restic.error_message,
}
@router.get("/restic/backup/reload")
async def reload_restic_backup():
restic_tasks.load_snapshots()
return {
"status": 0,
"message": "Snapshots reload started",
}
class BackupRestoreInput(BaseModel):
backupId: str
@router.put("/restic/backup/restore")
async def restore_restic_backup(backup: BackupRestoreInput):
restic = ResticController()
if restic.state is ResticStates.NO_KEY:
raise HTTPException(status_code=400, detail="Backup key not provided")
if restic.state is ResticStates.NOT_INITIALIZED:
raise HTTPException(
status_code=400, detail="Backups repository is not initialized"
)
if restic.state is ResticStates.BACKING_UP:
raise HTTPException(status_code=409, detail="Backup is already running")
if restic.state is ResticStates.INITIALIZING:
raise HTTPException(status_code=400, detail="Repository is initializing")
if restic.state is ResticStates.RESTORING:
raise HTTPException(status_code=409, detail="Restore is already running")
for backup_item in restic.snapshot_list:
if backup_item["short_id"] == backup.backupId:
restic_tasks.restore_from_backup(backup.backupId)
return {
"status": 0,
"message": "Backup restoration procedure started",
}
raise HTTPException(status_code=404, detail="Backup not found")
class BackblazeConfigInput(BaseModel):
accountId: str
accountKey: str
bucket: str
@router.put("/restic/backblaze/config")
async def set_backblaze_config(backblaze_config: BackblazeConfigInput):
with WriteUserData() as data:
if "backblaze" not in data:
data["backblaze"] = {}
data["backblaze"]["accountId"] = backblaze_config.accountId
data["backblaze"]["accountKey"] = backblaze_config.accountKey
data["backblaze"]["bucket"] = backblaze_config.bucket
restic_tasks.update_keys_from_userdata()
return "New Backblaze settings saved"
@router.post("/ssh/enable")
async def rest_enable_ssh():
"""Enable SSH"""
enable_ssh()
return {
"status": 0,
"message": "SSH enabled",
}
@router.get("/ssh")
async def rest_get_ssh():
"""Get the SSH configuration"""
settings = get_ssh_settings()
return {
"enable": settings.enable,
"passwordAuthentication": settings.passwordAuthentication,
}
class SshConfigInput(BaseModel):
enable: Optional[bool] = None
passwordAuthentication: Optional[bool] = None
@router.put("/ssh")
async def rest_set_ssh(ssh_config: SshConfigInput):
"""Set the SSH configuration"""
set_ssh_settings(ssh_config.enable, ssh_config.passwordAuthentication)
return "SSH settings changed"
class SshKeyInput(BaseModel):
public_key: str
@router.put("/ssh/key/send", status_code=201)
async def rest_send_ssh_key(input: SshKeyInput):
"""Send the SSH key"""
try:
create_ssh_key("root", input.public_key)
except KeyAlreadyExists as error:
raise HTTPException(status_code=409, detail="Key already exists") from error
except InvalidPublicKey as error:
raise HTTPException(
status_code=400,
detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported",
) from error
return {
"status": 0,
"message": "SSH key sent",
}
@router.get("/ssh/keys/{username}")
async def rest_get_ssh_keys(username: str):
"""Get the SSH keys for a user"""
user = get_user_by_username(username)
if user is None:
raise HTTPException(status_code=404, detail="User not found")
return user.ssh_keys
@router.post("/ssh/keys/{username}", status_code=201)
async def rest_add_ssh_key(username: str, input: SshKeyInput):
try:
create_ssh_key(username, input.public_key)
except KeyAlreadyExists as error:
raise HTTPException(status_code=409, detail="Key already exists") from error
except InvalidPublicKey as error:
raise HTTPException(
status_code=400,
detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported",
) from error
except UserNotFound as error:
raise HTTPException(status_code=404, detail="User not found") from error
return {
"message": "New SSH key successfully written",
}
@router.delete("/ssh/keys/{username}")
async def rest_delete_ssh_key(username: str, input: SshKeyInput):
try:
remove_ssh_key(username, input.public_key)
except KeyNotFound as error:
raise HTTPException(status_code=404, detail="Key not found") from error
except UserNotFound as error:
raise HTTPException(status_code=404, detail="User not found") from error
return {"message": "SSH key deleted"}

View file

@ -0,0 +1,105 @@
from typing import Optional
from fastapi import APIRouter, Body, Depends, HTTPException
from pydantic import BaseModel
from selfprivacy_api.dependencies import get_token_header
import selfprivacy_api.actions.system as system_actions
router = APIRouter(
prefix="/system",
tags=["system"],
dependencies=[Depends(get_token_header)],
responses={404: {"description": "Not found"}},
)
@router.get("/configuration/timezone")
async def get_timezone():
"""Get the timezone of the server"""
return system_actions.get_timezone()
class ChangeTimezoneRequestBody(BaseModel):
"""Change the timezone of the server"""
timezone: str
@router.put("/configuration/timezone")
async def change_timezone(timezone: ChangeTimezoneRequestBody):
"""Change the timezone of the server"""
try:
system_actions.change_timezone(timezone.timezone)
except system_actions.InvalidTimezone as e:
raise HTTPException(status_code=400, detail=str(e))
return {"timezone": timezone.timezone}
@router.get("/configuration/autoUpgrade")
async def get_auto_upgrade_settings():
"""Get the auto-upgrade settings"""
return system_actions.get_auto_upgrade_settings().dict()
class AutoUpgradeSettings(BaseModel):
"""Settings for auto-upgrading user data"""
enable: Optional[bool] = None
allowReboot: Optional[bool] = None
@router.put("/configuration/autoUpgrade")
async def set_auto_upgrade_settings(settings: AutoUpgradeSettings):
"""Set the auto-upgrade settings"""
system_actions.set_auto_upgrade_settings(settings.enable, settings.allowReboot)
return "Auto-upgrade settings changed"
@router.get("/configuration/apply")
async def apply_configuration():
"""Apply the configuration"""
return_code = system_actions.rebuild_system()
return return_code
@router.get("/configuration/rollback")
async def rollback_configuration():
"""Rollback the configuration"""
return_code = system_actions.rollback_system()
return return_code
@router.get("/configuration/upgrade")
async def upgrade_configuration():
"""Upgrade the configuration"""
return_code = system_actions.upgrade_system()
return return_code
@router.get("/reboot")
async def reboot_system():
"""Reboot the system"""
system_actions.reboot_system()
return "System reboot has started"
@router.get("/version")
async def get_system_version():
"""Get the system version"""
return {"system_version": system_actions.get_system_version()}
@router.get("/pythonVersion")
async def get_python_version():
"""Get the Python version"""
return system_actions.get_python_version()
@router.get("/configuration/pull")
async def pull_configuration():
"""Pull the configuration"""
action_result = system_actions.pull_repository_changes()
if action_result.status == 0:
return action_result.dict()
raise HTTPException(status_code=500, detail=action_result.dict())
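 
And one for the auto-upgrade settings endpoint, under the same assumptions about base URL and header:

"""Hypothetical client for /system/configuration/autoUpgrade; illustration only."""
import requests

API = "http://localhost:5050"                        # assumed base URL
HEADERS = {"Authorization": "Bearer <api token>"}    # assumed header format

# Read the current settings.
print(requests.get(f"{API}/system/configuration/autoUpgrade", headers=HEADERS).json())

# Enable automatic upgrades but forbid automatic reboots.
resp = requests.put(
    f"{API}/system/configuration/autoUpgrade",
    json={"enable": True, "allowReboot": False},
    headers=HEADERS,
)
print(resp.json())  # "Auto-upgrade settings changed"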

View file

@ -0,0 +1,62 @@
"""Users management module"""
from typing import Optional
from fastapi import APIRouter, Body, Depends, HTTPException
from pydantic import BaseModel
import selfprivacy_api.actions.users as users_actions
from selfprivacy_api.dependencies import get_token_header
router = APIRouter(
prefix="/users",
tags=["users"],
dependencies=[Depends(get_token_header)],
responses={404: {"description": "Not found"}},
)
@router.get("")
async def get_users(withMainUser: bool = False):
"""Get the list of users"""
users: list[users_actions.UserDataUser] = users_actions.get_users(
exclude_primary=not withMainUser, exclude_root=True
)
return [user.username for user in users]
class UserInput(BaseModel):
"""User input"""
username: str
password: str
@router.post("", status_code=201)
async def create_user(user: UserInput):
try:
users_actions.create_user(user.username, user.password)
except users_actions.PasswordIsEmpty as e:
raise HTTPException(status_code=400, detail=str(e))
except users_actions.UsernameForbidden as e:
raise HTTPException(status_code=409, detail=str(e))
except users_actions.UsernameNotAlphanumeric as e:
raise HTTPException(status_code=400, detail=str(e))
except users_actions.UsernameTooLong as e:
raise HTTPException(status_code=400, detail=str(e))
except users_actions.UserAlreadyExists as e:
raise HTTPException(status_code=409, detail=str(e))
return {"result": 0, "username": user.username}
@router.delete("/{username}")
async def delete_user(username: str):
try:
users_actions.delete_user(username)
except users_actions.UserNotFound as e:
raise HTTPException(status_code=404, detail=str(e))
except users_actions.UserIsProtected as e:
raise HTTPException(status_code=400, detail=str(e))
return {"result": 0, "username": username}

View file

@ -1,10 +1,8 @@
"""Tasks for the restic controller."""
from huey import crontab
from huey.contrib.mini import MiniHuey
from selfprivacy_api.utils.huey import huey
from . import ResticController, ResticStates
huey = MiniHuey()
@huey.task()
def init_restic():
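
This hunk drops the local MiniHuey instance in favour of the shared huey object from selfprivacy_api.utils.huey. A short sketch of what task registration against the shared instance looks like; the task below is hypothetical and not part of this commit.

"""Sketch of registering a task on the shared huey instance; illustration only."""
from selfprivacy_api.utils.huey import huey

@huey.task()
def hypothetical_task(snapshot_id: str):
    # Placeholder body; the real restic tasks live in restic_controller/tasks.py.
    print(f"working on {snapshot_id}")

# Calling the decorated function enqueues it for the huey consumer
# (or runs it immediately, depending on how the shared instance is configured).
hypothetical_task("abc123")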

View file

@ -0,0 +1,67 @@
"""Services module."""
import typing
from selfprivacy_api.services.bitwarden import Bitwarden
from selfprivacy_api.services.gitea import Gitea
from selfprivacy_api.services.jitsi import Jitsi
from selfprivacy_api.services.mailserver import MailServer
from selfprivacy_api.services.nextcloud import Nextcloud
from selfprivacy_api.services.pleroma import Pleroma
from selfprivacy_api.services.ocserv import Ocserv
from selfprivacy_api.services.service import Service, ServiceDnsRecord
import selfprivacy_api.utils.network as network_utils
services: list[Service] = [
Bitwarden(),
Gitea(),
MailServer(),
Nextcloud(),
Pleroma(),
Ocserv(),
Jitsi(),
]
def get_all_services() -> list[Service]:
return services
def get_service_by_id(service_id: str) -> typing.Optional[Service]:
for service in services:
if service.get_id() == service_id:
return service
return None
def get_enabled_services() -> list[Service]:
return [service for service in services if service.is_enabled()]
def get_disabled_services() -> list[Service]:
return [service for service in services if not service.is_enabled()]
def get_services_by_location(location: str) -> list[Service]:
return [service for service in services if service.get_location() == location]
def get_all_required_dns_records() -> list[ServiceDnsRecord]:
ip4 = network_utils.get_ip4()
ip6 = network_utils.get_ip6()
dns_records: list[ServiceDnsRecord] = [
ServiceDnsRecord(
type="A",
name="api",
content=ip4,
ttl=3600,
),
ServiceDnsRecord(
type="AAAA",
name="api",
content=ip6,
ttl=3600,
),
]
for service in get_enabled_services():
dns_records += service.get_dns_records()
return dns_records
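
A short sketch of how this registry can be consumed, for example to print the DNS records an operator would need to create. It assumes the module is importable as selfprivacy_api.services and that it runs on a configured server, since IPs and userdata are read live.

"""Illustrative use of the service registry; run on the server itself."""
from selfprivacy_api.services import get_all_required_dns_records, get_service_by_id

# Look a single service up by its id.
bitwarden = get_service_by_id("bitwarden")
if bitwarden is not None:
    print(bitwarden.get_display_name(), "enabled:", bitwarden.is_enabled())

# Print every DNS record required by the enabled services plus the API itself.
for record in get_all_required_dns_records():
    print(f"{record.type:5} {record.name:20} {record.content} (TTL {record.ttl})")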

View file

@ -0,0 +1,174 @@
"""Class representing Bitwarden service"""
import base64
import subprocess
import typing
from selfprivacy_api.jobs import Job, JobStatus, Jobs
from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
from selfprivacy_api.services.generic_size_counter import get_storage_usage
from selfprivacy_api.services.generic_status_getter import get_service_status
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils.huey import huey
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.bitwarden.icon import BITWARDEN_ICON
class Bitwarden(Service):
"""Class representing Bitwarden service."""
@staticmethod
def get_id() -> str:
"""Return service id."""
return "bitwarden"
@staticmethod
def get_display_name() -> str:
"""Return service display name."""
return "Bitwarden"
@staticmethod
def get_description() -> str:
"""Return service description."""
return "Bitwarden is a password manager."
@staticmethod
def get_svg_icon() -> str:
"""Read SVG icon from file and return it as base64 encoded string."""
return base64.b64encode(BITWARDEN_ICON.encode("utf-8")).decode("utf-8")
@staticmethod
def get_url() -> typing.Optional[str]:
"""Return service url."""
domain = get_domain()
return f"https://password.{domain}"
@staticmethod
def is_movable() -> bool:
return True
@staticmethod
def is_required() -> bool:
return False
@staticmethod
def is_enabled() -> bool:
with ReadUserData() as user_data:
return user_data.get("bitwarden", {}).get("enable", False)
@staticmethod
def get_status() -> ServiceStatus:
"""
Return Bitwarden status from systemd.
Use command return code to determine status.
Return code 0 means service is running.
Return code 1 or 2 means service is in error stat.
Return code 3 means service is stopped.
Return code 4 means service is off.
"""
return get_service_status("vaultwarden.service")
@staticmethod
def enable():
"""Enable Bitwarden service."""
with WriteUserData() as user_data:
if "bitwarden" not in user_data:
user_data["bitwarden"] = {}
user_data["bitwarden"]["enable"] = True
@staticmethod
def disable():
"""Disable Bitwarden service."""
with WriteUserData() as user_data:
if "bitwarden" not in user_data:
user_data["bitwarden"] = {}
user_data["bitwarden"]["enable"] = False
@staticmethod
def stop():
subprocess.run(["systemctl", "stop", "vaultwarden.service"])
@staticmethod
def start():
subprocess.run(["systemctl", "start", "vaultwarden.service"])
@staticmethod
def restart():
subprocess.run(["systemctl", "restart", "vaultwarden.service"])
@staticmethod
def get_configuration():
return {}
@staticmethod
def set_configuration(config_items):
return super().set_configuration(config_items)
@staticmethod
def get_logs():
return ""
@staticmethod
def get_storage_usage() -> int:
storage_usage = 0
storage_usage += get_storage_usage("/var/lib/bitwarden")
storage_usage += get_storage_usage("/var/lib/bitwarden_rs")
return storage_usage
@staticmethod
def get_location() -> str:
with ReadUserData() as user_data:
if user_data.get("useBinds", False):
return user_data.get("bitwarden", {}).get("location", "sda1")
else:
return "sda1"
@staticmethod
def get_dns_records() -> typing.List[ServiceDnsRecord]:
"""Return list of DNS records for Bitwarden service."""
return [
ServiceDnsRecord(
type="A",
name="password",
content=network_utils.get_ip4(),
ttl=3600,
),
ServiceDnsRecord(
type="AAAA",
name="password",
content=network_utils.get_ip6(),
ttl=3600,
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
job = Jobs.get_instance().add(
type_id="services.bitwarden.move",
name="Move Bitwarden",
description=f"Moving Bitwarden data to {volume.name}",
)
move_service(
self,
volume,
job,
[
FolderMoveNames(
name="bitwarden",
bind_location="/var/lib/bitwarden",
group="vaultwarden",
owner="vaultwarden",
),
FolderMoveNames(
name="bitwarden_rs",
bind_location="/var/lib/bitwarden_rs",
group="vaultwarden",
owner="vaultwarden",
),
],
"bitwarden",
)
return job
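
Because every method of interest is a @staticmethod, the class can be used without instantiation. A minimal sketch, meant to run as root on a SelfPrivacy server since it touches userdata and systemd; note that enabling only flips the userdata flag, the configuration still has to be applied through the rebuild endpoint.

"""Illustrative use of the Service interface via Bitwarden; server-side only."""
from selfprivacy_api.services.bitwarden import Bitwarden

print(Bitwarden.get_display_name(), Bitwarden.get_status())
if not Bitwarden.is_enabled():
    Bitwarden.enable()  # writes userdata; a rebuild is still required to take effect
print(Bitwarden.get_dns_records())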

View file

@ -0,0 +1,3 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.125 2C4.2962 2 3.50134 2.32924 2.91529 2.91529C2.32924 3.50134 2 4.2962 2 5.125L2 18.875C2 19.7038 2.32924 20.4987 2.91529 21.0847C3.50134 21.6708 4.2962 22 5.125 22H18.875C19.7038 22 20.4987 21.6708 21.0847 21.0847C21.6708 20.4987 22 19.7038 22 18.875V5.125C22 4.2962 21.6708 3.50134 21.0847 2.91529C20.4987 2.32924 19.7038 2 18.875 2H5.125ZM6.25833 4.43333H17.7583C17.9317 4.43333 18.0817 4.49667 18.2083 4.62333C18.2688 4.68133 18.3168 4.7511 18.3494 4.82835C18.3819 4.9056 18.3983 4.98869 18.3975 5.0725V12.7392C18.3975 13.3117 18.2858 13.8783 18.0633 14.4408C17.8558 14.9751 17.5769 15.4789 17.2342 15.9383C16.8824 16.3987 16.4882 16.825 16.0567 17.2117C15.6008 17.6242 15.18 17.9667 14.7942 18.24C14.4075 18.5125 14.005 18.77 13.5858 19.0133C13.1667 19.2558 12.8692 19.4208 12.6925 19.5075C12.5158 19.5942 12.375 19.6608 12.2675 19.7075C12.1872 19.7472 12.0987 19.7674 12.0092 19.7667C11.919 19.7674 11.8299 19.7468 11.7492 19.7067C11.6062 19.6429 11.4645 19.5762 11.3242 19.5067C11.0218 19.3511 10.7242 19.1866 10.4317 19.0133C10.0175 18.7738 9.6143 18.5158 9.22333 18.24C8.7825 17.9225 8.36093 17.5791 7.96083 17.2117C7.52907 16.825 7.13456 16.3987 6.7825 15.9383C6.44006 15.4788 6.16141 14.9751 5.95417 14.4408C5.73555 13.9 5.62213 13.3225 5.62 12.7392V5.0725C5.62 4.89917 5.68333 4.75 5.80917 4.6225C5.86726 4.56188 5.93717 4.51382 6.01457 4.48129C6.09196 4.44875 6.17521 4.43243 6.25917 4.43333H6.25833ZM12.0083 6.35V17.7C12.8 17.2817 13.5092 16.825 14.135 16.3333C15.6992 15.1083 16.4808 13.9108 16.4808 12.7392V6.35H12.0083Z" fill="black"/>
</svg>


View file

@ -0,0 +1,5 @@
BITWARDEN_ICON = """
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.125 2C4.2962 2 3.50134 2.32924 2.91529 2.91529C2.32924 3.50134 2 4.2962 2 5.125L2 18.875C2 19.7038 2.32924 20.4987 2.91529 21.0847C3.50134 21.6708 4.2962 22 5.125 22H18.875C19.7038 22 20.4987 21.6708 21.0847 21.0847C21.6708 20.4987 22 19.7038 22 18.875V5.125C22 4.2962 21.6708 3.50134 21.0847 2.91529C20.4987 2.32924 19.7038 2 18.875 2H5.125ZM6.25833 4.43333H17.7583C17.9317 4.43333 18.0817 4.49667 18.2083 4.62333C18.2688 4.68133 18.3168 4.7511 18.3494 4.82835C18.3819 4.9056 18.3983 4.98869 18.3975 5.0725V12.7392C18.3975 13.3117 18.2858 13.8783 18.0633 14.4408C17.8558 14.9751 17.5769 15.4789 17.2342 15.9383C16.8824 16.3987 16.4882 16.825 16.0567 17.2117C15.6008 17.6242 15.18 17.9667 14.7942 18.24C14.4075 18.5125 14.005 18.77 13.5858 19.0133C13.1667 19.2558 12.8692 19.4208 12.6925 19.5075C12.5158 19.5942 12.375 19.6608 12.2675 19.7075C12.1872 19.7472 12.0987 19.7674 12.0092 19.7667C11.919 19.7674 11.8299 19.7468 11.7492 19.7067C11.6062 19.6429 11.4645 19.5762 11.3242 19.5067C11.0218 19.3511 10.7242 19.1866 10.4317 19.0133C10.0175 18.7738 9.6143 18.5158 9.22333 18.24C8.7825 17.9225 8.36093 17.5791 7.96083 17.2117C7.52907 16.825 7.13456 16.3987 6.7825 15.9383C6.44006 15.4788 6.16141 14.9751 5.95417 14.4408C5.73555 13.9 5.62213 13.3225 5.62 12.7392V5.0725C5.62 4.89917 5.68333 4.75 5.80917 4.6225C5.86726 4.56188 5.93717 4.51382 6.01457 4.48129C6.09196 4.44875 6.17521 4.43243 6.25917 4.43333H6.25833ZM12.0083 6.35V17.7C12.8 17.2817 13.5092 16.825 14.135 16.3333C15.6992 15.1083 16.4808 13.9108 16.4808 12.7392V6.35H12.0083Z" fill="black"/>
</svg>
"""

View file

@ -0,0 +1,237 @@
"""Generic handler for moving services"""
import subprocess
import time
import pathlib
import shutil
from pydantic import BaseModel
from selfprivacy_api.jobs import Job, JobStatus, Jobs
from selfprivacy_api.utils.huey import huey
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils import ReadUserData, WriteUserData
from selfprivacy_api.services.service import Service, ServiceStatus
class FolderMoveNames(BaseModel):
name: str
bind_location: str
owner: str
group: str
@huey.task()
def move_service(
service: Service,
volume: BlockDevice,
job: Job,
folder_names: list[FolderMoveNames],
userdata_location: str,
):
"""Move a service to another volume."""
job = Jobs.get_instance().update(
job=job,
status_text="Performing pre-move checks...",
status=JobStatus.RUNNING,
)
service_name = service.get_display_name()
with ReadUserData() as user_data:
if not user_data.get("useBinds", False):
Jobs.get_instance().update(
job=job,
status=JobStatus.ERROR,
error="Server is not using binds.",
)
return
# Check if we are on the same volume
old_volume = service.get_location()
if old_volume == volume.name:
Jobs.get_instance().update(
job=job,
status=JobStatus.ERROR,
error=f"{service_name} is already on this volume.",
)
return
# Check if there is enough space on the new volume
if int(volume.fsavail) < service.get_storage_usage():
Jobs.get_instance().update(
job=job,
status=JobStatus.ERROR,
error="Not enough space on the new volume.",
)
return
# Make sure the volume is mounted
if f"/volumes/{volume.name}" not in volume.mountpoints:
Jobs.get_instance().update(
job=job,
status=JobStatus.ERROR,
error="Volume is not mounted.",
)
return
# Make sure each current data directory exists, is a directory, and is owned by the expected user
for folder in folder_names:
if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").exists():
Jobs.get_instance().update(
job=job,
status=JobStatus.ERROR,
error=f"{service_name} is not found.",
)
return
if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").is_dir():
Jobs.get_instance().update(
job=job,
status=JobStatus.ERROR,
error=f"{service_name} is not a directory.",
)
return
if (
not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").owner()
== folder.owner
):
Jobs.get_instance().update(
job=job,
status=JobStatus.ERROR,
error=f"{service_name} owner is not {folder.owner}.",
)
return
# Stop service
Jobs.get_instance().update(
job=job,
status=JobStatus.RUNNING,
status_text=f"Stopping {service_name}...",
progress=5,
)
service.stop()
# Wait for the service to stop, check every second
# If it does not stop in 30 seconds, abort
for _ in range(30):
if service.get_status() not in (
ServiceStatus.ACTIVATING,
ServiceStatus.DEACTIVATING,
):
break
time.sleep(1)
else:
Jobs.get_instance().update(
job=job,
status=JobStatus.ERROR,
error=f"{service_name} did not stop in 30 seconds.",
)
return
# Unmount old volume
Jobs.get_instance().update(
job=job,
status_text="Unmounting old folder...",
status=JobStatus.RUNNING,
progress=10,
)
for folder in folder_names:
try:
subprocess.run(
["umount", folder.bind_location],
check=True,
)
except subprocess.CalledProcessError:
Jobs.get_instance().update(
job=job,
status=JobStatus.ERROR,
error="Unable to unmount old volume.",
)
return
# Move data to new volume and set correct permissions
Jobs.get_instance().update(
job=job,
status_text="Moving data to new volume...",
status=JobStatus.RUNNING,
progress=20,
)
current_progress = 20
folder_percentage = 50 // len(folder_names)
for folder in folder_names:
shutil.move(
f"/volumes/{old_volume}/{folder.name}",
f"/volumes/{volume.name}/{folder.name}",
)
Jobs.get_instance().update(
job=job,
status_text="Moving data to new volume...",
status=JobStatus.RUNNING,
progress=current_progress + folder_percentage,
)
Jobs.get_instance().update(
job=job,
status_text=f"Making sure {service_name} owns its files...",
status=JobStatus.RUNNING,
progress=70,
)
for folder in folder_names:
try:
subprocess.run(
[
"chown",
"-R",
f"{folder.owner}:f{folder.group}",
f"/volumes/{volume.name}/{folder.name}",
],
check=True,
)
except subprocess.CalledProcessError as error:
print(error.output)
Jobs.get_instance().update(
job=job,
status=JobStatus.RUNNING,
error=f"Unable to set ownership of new volume. {service_name} may not be able to access its files. Continuing anyway.",
)
# Mount new volume
Jobs.get_instance().update(
job=job,
status_text=f"Mounting {service_name} data...",
status=JobStatus.RUNNING,
progress=90,
)
for folder in folder_names:
try:
subprocess.run(
[
"mount",
"--bind",
f"/volumes/{volume.name}/{folder.name}",
folder.bind_location,
],
check=True,
)
except subprocess.CalledProcessError as error:
print(error.output)
Jobs.get_instance().update(
job=job,
status=JobStatus.ERROR,
error="Unable to mount new volume.",
)
return
# Update userdata
Jobs.get_instance().update(
job=job,
status_text="Finishing move...",
status=JobStatus.RUNNING,
progress=95,
)
with WriteUserData() as user_data:
if userdata_location not in user_data:
user_data[userdata_location] = {}
user_data[userdata_location]["location"] = volume.name
# Start service
service.start()
Jobs.get_instance().update(
job=job,
status=JobStatus.FINISHED,
result=f"{service_name} moved successfully.",
status_text=f"Starting {service}...",
progress=100,
)

View file

@ -0,0 +1,16 @@
"""Generic size counter using pathlib"""
import pathlib
def get_storage_usage(path: str) -> int:
"""
Calculate the real storage usage of path and all subdirectories.
Calculate using pathlib.
Do not follow symlinks.
"""
storage_usage = 0
for iter_path in pathlib.Path(path).rglob("**/*"):
if iter_path.is_dir():
continue
storage_usage += iter_path.stat().st_size
return storage_usage
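
For example, a one-off check of an assumed data directory (the path is an assumption, not something this module mandates):

# Illustration only: report the usage of a directory in MiB.
from selfprivacy_api.services.generic_size_counter import get_storage_usage

used = get_storage_usage("/var/lib/bitwarden")   # path assumed to exist
print(f"{used / 1024 / 1024:.1f} MiB")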

View file

@ -0,0 +1,60 @@
"""Generic service status fetcher using systemctl"""
import subprocess
from selfprivacy_api.services.service import ServiceStatus
def get_service_status(service: str) -> ServiceStatus:
"""
Return service status from systemd.
Use systemctl show to get the status of a service.
Get ActiveState from the output.
"""
service_status = subprocess.check_output(["systemctl", "show", service])
if b"LoadState=not-found" in service_status:
return ServiceStatus.OFF
if b"ActiveState=active" in service_status:
return ServiceStatus.ACTIVE
if b"ActiveState=inactive" in service_status:
return ServiceStatus.INACTIVE
if b"ActiveState=activating" in service_status:
return ServiceStatus.ACTIVATING
if b"ActiveState=deactivating" in service_status:
return ServiceStatus.DEACTIVATING
if b"ActiveState=failed" in service_status:
return ServiceStatus.FAILED
if b"ActiveState=reloading" in service_status:
return ServiceStatus.RELOADING
return ServiceStatus.OFF
def get_service_status_from_several_units(services: list[str]) -> ServiceStatus:
"""
Fetch all service statuses for all services and return the worst status.
Statuses from worst to best:
- OFF
- FAILED
- RELOADING
- ACTIVATING
- DEACTIVATING
- INACTIVE
- ACTIVE
"""
service_statuses = []
for service in services:
service_statuses.append(get_service_status(service))
if ServiceStatus.OFF in service_statuses:
return ServiceStatus.OFF
if ServiceStatus.FAILED in service_statuses:
return ServiceStatus.FAILED
if ServiceStatus.RELOADING in service_statuses:
return ServiceStatus.RELOADING
if ServiceStatus.ACTIVATING in service_statuses:
return ServiceStatus.ACTIVATING
if ServiceStatus.DEACTIVATING in service_statuses:
return ServiceStatus.DEACTIVATING
if ServiceStatus.INACTIVE in service_statuses:
return ServiceStatus.INACTIVE
if ServiceStatus.ACTIVE in service_statuses:
return ServiceStatus.ACTIVE
return ServiceStatus.OFF
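
A usage sketch; the unit names below are the same ones the Bitwarden and MailServer services check, and this is expected to run on a host where those units exist:

# Illustration only: check a single unit and the combined mail stack.
from selfprivacy_api.services.generic_status_getter import (
    get_service_status,
    get_service_status_from_several_units,
)

print(get_service_status("vaultwarden.service"))
print(get_service_status_from_several_units(["dovecot2.service", "postfix.service"]))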

View file

@ -0,0 +1,165 @@
"""Class representing Bitwarden service"""
import base64
import subprocess
import typing
from selfprivacy_api.jobs import Job, Jobs
from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
from selfprivacy_api.services.generic_size_counter import get_storage_usage
from selfprivacy_api.services.generic_status_getter import get_service_status
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils.huey import huey
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.gitea.icon import GITEA_ICON
class Gitea(Service):
"""Class representing Gitea service"""
@staticmethod
def get_id() -> str:
"""Return service id."""
return "gitea"
@staticmethod
def get_display_name() -> str:
"""Return service display name."""
return "Gitea"
@staticmethod
def get_description() -> str:
"""Return service description."""
return "Gitea is a Git forge."
@staticmethod
def get_svg_icon() -> str:
"""Read SVG icon from file and return it as base64 encoded string."""
return base64.b64encode(GITEA_ICON.encode("utf-8")).decode("utf-8")
@staticmethod
def get_url() -> typing.Optional[str]:
"""Return service url."""
domain = get_domain()
return f"https://git.{domain}"
@staticmethod
def is_movable() -> bool:
return True
@staticmethod
def is_required() -> bool:
return False
@staticmethod
def is_enabled() -> bool:
with ReadUserData() as user_data:
return user_data.get("gitea", {}).get("enable", False)
@staticmethod
def get_status() -> ServiceStatus:
"""
Return Gitea status from systemd.
Use command return code to determine status.
Return code 0 means service is running.
Return code 1 or 2 means service is in error stat.
Return code 3 means service is stopped.
Return code 4 means service is off.
"""
return get_service_status("gitea.service")
@staticmethod
def enable():
"""Enable Gitea service."""
with WriteUserData() as user_data:
if "gitea" not in user_data:
user_data["gitea"] = {}
user_data["gitea"]["enable"] = True
@staticmethod
def disable():
"""Disable Gitea service."""
with WriteUserData() as user_data:
if "gitea" not in user_data:
user_data["gitea"] = {}
user_data["gitea"]["enable"] = False
@staticmethod
def stop():
subprocess.run(["systemctl", "stop", "gitea.service"])
@staticmethod
def start():
subprocess.run(["systemctl", "start", "gitea.service"])
@staticmethod
def restart():
subprocess.run(["systemctl", "restart", "gitea.service"])
@staticmethod
def get_configuration():
return {}
@staticmethod
def set_configuration(config_items):
return super().set_configuration(config_items)
@staticmethod
def get_logs():
return ""
@staticmethod
def get_storage_usage() -> int:
storage_usage = 0
storage_usage += get_storage_usage("/var/lib/gitea")
return storage_usage
@staticmethod
def get_location() -> str:
with ReadUserData() as user_data:
if user_data.get("useBinds", False):
return user_data.get("gitea", {}).get("location", "sda1")
else:
return "sda1"
@staticmethod
def get_dns_records() -> typing.List[ServiceDnsRecord]:
return [
ServiceDnsRecord(
type="A",
name="git",
content=network_utils.get_ip4(),
ttl=3600,
),
ServiceDnsRecord(
type="AAAA",
name="git",
content=network_utils.get_ip6(),
ttl=3600,
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
job = Jobs.get_instance().add(
type_id="services.gitea.move",
name="Move Gitea",
description=f"Moving Gitea data to {volume.name}",
)
move_service(
self,
volume,
job,
[
FolderMoveNames(
name="gitea",
bind_location="/var/lib/gitea",
group="gitea",
owner="gitea",
),
],
"bitwarden",
)
return job

View file

@ -0,0 +1,3 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2.60007 10.5899L8.38007 4.79995L10.0701 6.49995C9.83007 7.34995 10.2201 8.27995 11.0001 8.72995V14.2699C10.4001 14.6099 10.0001 15.2599 10.0001 15.9999C10.0001 16.5304 10.2108 17.0391 10.5859 17.4142C10.9609 17.7892 11.4696 17.9999 12.0001 17.9999C12.5305 17.9999 13.0392 17.7892 13.4143 17.4142C13.7894 17.0391 14.0001 16.5304 14.0001 15.9999C14.0001 15.2599 13.6001 14.6099 13.0001 14.2699V9.40995L15.0701 11.4999C15.0001 11.6499 15.0001 11.8199 15.0001 11.9999C15.0001 12.5304 15.2108 13.0391 15.5859 13.4142C15.9609 13.7892 16.4696 13.9999 17.0001 13.9999C17.5305 13.9999 18.0392 13.7892 18.4143 13.4142C18.7894 13.0391 19.0001 12.5304 19.0001 11.9999C19.0001 11.4695 18.7894 10.9608 18.4143 10.5857C18.0392 10.2107 17.5305 9.99995 17.0001 9.99995C16.8201 9.99995 16.6501 9.99995 16.5001 10.0699L13.9301 7.49995C14.1901 6.56995 13.7101 5.54995 12.7801 5.15995C12.3501 4.99995 11.9001 4.95995 11.5001 5.06995L9.80007 3.37995L10.5901 2.59995C11.3701 1.80995 12.6301 1.80995 13.4101 2.59995L21.4001 10.5899C22.1901 11.3699 22.1901 12.6299 21.4001 13.4099L13.4101 21.3999C12.6301 22.1899 11.3701 22.1899 10.5901 21.3999L2.60007 13.4099C1.81007 12.6299 1.81007 11.3699 2.60007 10.5899Z" fill="black"/>
</svg>


View file

@ -0,0 +1,5 @@
GITEA_ICON = """
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2.60007 10.5899L8.38007 4.79995L10.0701 6.49995C9.83007 7.34995 10.2201 8.27995 11.0001 8.72995V14.2699C10.4001 14.6099 10.0001 15.2599 10.0001 15.9999C10.0001 16.5304 10.2108 17.0391 10.5859 17.4142C10.9609 17.7892 11.4696 17.9999 12.0001 17.9999C12.5305 17.9999 13.0392 17.7892 13.4143 17.4142C13.7894 17.0391 14.0001 16.5304 14.0001 15.9999C14.0001 15.2599 13.6001 14.6099 13.0001 14.2699V9.40995L15.0701 11.4999C15.0001 11.6499 15.0001 11.8199 15.0001 11.9999C15.0001 12.5304 15.2108 13.0391 15.5859 13.4142C15.9609 13.7892 16.4696 13.9999 17.0001 13.9999C17.5305 13.9999 18.0392 13.7892 18.4143 13.4142C18.7894 13.0391 19.0001 12.5304 19.0001 11.9999C19.0001 11.4695 18.7894 10.9608 18.4143 10.5857C18.0392 10.2107 17.5305 9.99995 17.0001 9.99995C16.8201 9.99995 16.6501 9.99995 16.5001 10.0699L13.9301 7.49995C14.1901 6.56995 13.7101 5.54995 12.7801 5.15995C12.3501 4.99995 11.9001 4.95995 11.5001 5.06995L9.80007 3.37995L10.5901 2.59995C11.3701 1.80995 12.6301 1.80995 13.4101 2.59995L21.4001 10.5899C22.1901 11.3699 22.1901 12.6299 21.4001 13.4099L13.4101 21.3999C12.6301 22.1899 11.3701 22.1899 10.5901 21.3999L2.60007 13.4099C1.81007 12.6299 1.81007 11.3699 2.60007 10.5899Z" fill="black"/>
</svg>
"""

View file

@ -0,0 +1,142 @@
"""Class representing Jitsi service"""
import base64
import subprocess
import typing
from selfprivacy_api.jobs import Job, Jobs
from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
from selfprivacy_api.services.generic_size_counter import get_storage_usage
from selfprivacy_api.services.generic_status_getter import (
get_service_status,
get_service_status_from_several_units,
)
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils.huey import huey
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.jitsi.icon import JITSI_ICON
class Jitsi(Service):
"""Class representing Jitsi service"""
@staticmethod
def get_id() -> str:
"""Return service id."""
return "jitsi"
@staticmethod
def get_display_name() -> str:
"""Return service display name."""
return "Jitsi"
@staticmethod
def get_description() -> str:
"""Return service description."""
return "Jitsi is a free and open-source video conferencing solution."
@staticmethod
def get_svg_icon() -> str:
"""Read SVG icon from file and return it as base64 encoded string."""
return base64.b64encode(JITSI_ICON.encode("utf-8")).decode("utf-8")
@staticmethod
def get_url() -> typing.Optional[str]:
"""Return service url."""
domain = get_domain()
return f"https://meet.{domain}"
@staticmethod
def is_movable() -> bool:
return False
@staticmethod
def is_required() -> bool:
return False
@staticmethod
def is_enabled() -> bool:
with ReadUserData() as user_data:
return user_data.get("jitsi", {}).get("enable", False)
@staticmethod
def get_status() -> ServiceStatus:
return get_service_status_from_several_units(
["jitsi-videobridge.service", "jicofo.service"]
)
@staticmethod
def enable():
"""Enable Jitsi service."""
with WriteUserData() as user_data:
if "jitsi" not in user_data:
user_data["jitsi"] = {}
user_data["jitsi"]["enable"] = True
@staticmethod
def disable():
"""Disable Gitea service."""
with WriteUserData() as user_data:
if "jitsi" not in user_data:
user_data["jitsi"] = {}
user_data["jitsi"]["enable"] = False
@staticmethod
def stop():
subprocess.run(["systemctl", "stop", "jitsi-videobridge.service"])
subprocess.run(["systemctl", "stop", "jicofo.service"])
@staticmethod
def start():
subprocess.run(["systemctl", "start", "jitsi-videobridge.service"])
subprocess.run(["systemctl", "start", "jicofo.service"])
@staticmethod
def restart():
subprocess.run(["systemctl", "restart", "jitsi-videobridge.service"])
subprocess.run(["systemctl", "restart", "jicofo.service"])
@staticmethod
def get_configuration():
return {}
@staticmethod
def set_configuration(config_items):
return super().set_configuration(config_items)
@staticmethod
def get_logs():
return ""
@staticmethod
def get_storage_usage() -> int:
storage_usage = 0
storage_usage += get_storage_usage("/var/lib/jitsi-meet")
return storage_usage
@staticmethod
def get_location() -> str:
return "sda1"
@staticmethod
def get_dns_records() -> typing.List[ServiceDnsRecord]:
ip4 = network_utils.get_ip4()
ip6 = network_utils.get_ip6()
return [
ServiceDnsRecord(
type="A",
name="meet",
content=ip4,
ttl=3600,
),
ServiceDnsRecord(
type="AAAA",
name="meet",
content=ip6,
ttl=3600,
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
raise NotImplementedError("jitsi service is not movable")

View file

@ -0,0 +1,5 @@
JITSI_ICON = """
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M26.6665 2.66663H5.33317C3.8665 2.66663 2.67984 3.86663 2.67984 5.33329L2.6665 29.3333L7.99984 24H26.6665C28.1332 24 29.3332 22.8 29.3332 21.3333V5.33329C29.3332 3.86663 28.1332 2.66663 26.6665 2.66663ZM26.6665 21.3333H6.89317L5.33317 22.8933V5.33329H26.6665V21.3333ZM18.6665 14.1333L22.6665 17.3333V9.33329L18.6665 12.5333V9.33329H9.33317V17.3333H18.6665V14.1333Z" fill="black"/>
</svg>
"""

View file

@ -0,0 +1,179 @@
"""Class representing Dovecot and Postfix services"""
import base64
import subprocess
import typing
from selfprivacy_api.jobs import Job, JobStatus, Jobs
from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
from selfprivacy_api.services.generic_size_counter import get_storage_usage
from selfprivacy_api.services.generic_status_getter import (
get_service_status,
get_service_status_from_several_units,
)
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
import selfprivacy_api.utils as utils
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.utils.huey import huey
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.mailserver.icon import MAILSERVER_ICON
class MailServer(Service):
"""Class representing mail service"""
@staticmethod
def get_id() -> str:
return "mailserver"
@staticmethod
def get_display_name() -> str:
return "Mail Server"
@staticmethod
def get_description() -> str:
return "E-Mail for company and family."
@staticmethod
def get_svg_icon() -> str:
return base64.b64encode(MAILSERVER_ICON.encode("utf-8")).decode("utf-8")
@staticmethod
def get_url() -> typing.Optional[str]:
"""Return service url."""
return None
@staticmethod
def is_movable() -> bool:
return True
@staticmethod
def is_required() -> bool:
return True
@staticmethod
def is_enabled() -> bool:
return True
@staticmethod
def get_status() -> ServiceStatus:
return get_service_status_from_several_units(
["dovecot2.service", "postfix.service"]
)
@staticmethod
def enable():
raise NotImplementedError("enable is not implemented for MailServer")
@staticmethod
def disable():
raise NotImplementedError("disable is not implemented for MailServer")
@staticmethod
def stop():
subprocess.run(["systemctl", "stop", "dovecot2.service"])
subprocess.run(["systemctl", "stop", "postfix.service"])
@staticmethod
def start():
subprocess.run(["systemctl", "start", "dovecot2.service"])
subprocess.run(["systemctl", "start", "postfix.service"])
@staticmethod
def restart():
subprocess.run(["systemctl", "restart", "dovecot2.service"])
subprocess.run(["systemctl", "restart", "postfix.service"])
@staticmethod
def get_configuration():
return {}
@staticmethod
def set_configuration(config_items):
return super().set_configuration(config_items)
@staticmethod
def get_logs():
return ""
@staticmethod
def get_storage_usage() -> int:
return get_storage_usage("/var/vmail")
@staticmethod
def get_location() -> str:
with utils.ReadUserData() as user_data:
if user_data.get("useBinds", False):
return user_data.get("mailserver", {}).get("location", "sda1")
else:
return "sda1"
@staticmethod
def get_dns_records() -> typing.List[ServiceDnsRecord]:
domain = utils.get_domain()
dkim_record = utils.get_dkim_key(domain)
ip4 = network_utils.get_ip4()
ip6 = network_utils.get_ip6()
if dkim_record is None:
return []
return [
ServiceDnsRecord(
type="A",
name=domain,
content=ip4,
ttl=3600,
),
ServiceDnsRecord(
type="AAAA",
name=domain,
content=ip6,
ttl=3600,
),
ServiceDnsRecord(
type="MX", name=domain, content=domain, ttl=3600, priority=10
),
ServiceDnsRecord(
type="TXT", name="_dmarc", content=f"v=DMARC1; p=none", ttl=18000
),
ServiceDnsRecord(
type="TXT",
name=domain,
content=f"v=spf1 a mx ip4:{ip4} -all",
ttl=18000,
),
ServiceDnsRecord(
type="TXT", name="selector._domainkey", content=dkim_record, ttl=18000
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
job = Jobs.get_instance().add(
type_id="services.mailserver.move",
name="Move Mail Server",
description=f"Moving mailserver data to {volume.name}",
)
move_service(
self,
volume,
job,
[
FolderMoveNames(
name="vmail",
bind_location="/var/vmail",
group="virtualMail",
owner="virtualMail",
),
FolderMoveNames(
name="sieve",
bind_location="/var/sieve",
group="virtualMail",
owner="virtualMail",
),
],
"mailserver",
)
return job
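
The records returned above form the full mail record set (A/AAAA, MX, SPF, DMARC, DKIM). A hedged sketch that prints them in a zone-file-like form; it has to run on the server so the domain, IPs, and DKIM key can actually be read.

"""Illustration only: dump the mail DNS record set in a zone-file-like form."""
from selfprivacy_api.services.mailserver import MailServer

for record in MailServer.get_dns_records():
    # priority is only set for the MX record; getattr keeps the sketch defensive.
    priority = f" {record.priority}" if getattr(record, "priority", None) else ""
    print(f"{record.name} {record.ttl} IN {record.type}{priority} {record.content}")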

View file

@ -0,0 +1,5 @@
MAILSERVER_ICON = """
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M13.3333 2.66675H2.66665C1.93331 2.66675 1.33998 3.26675 1.33998 4.00008L1.33331 12.0001C1.33331 12.7334 1.93331 13.3334 2.66665 13.3334H13.3333C14.0666 13.3334 14.6666 12.7334 14.6666 12.0001V4.00008C14.6666 3.26675 14.0666 2.66675 13.3333 2.66675ZM13.3333 12.0001H2.66665V5.33341L7.99998 8.66675L13.3333 5.33341V12.0001ZM7.99998 7.33341L2.66665 4.00008H13.3333L7.99998 7.33341Z" fill="black"/>
</svg>
"""

View file

@ -0,0 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M13.3333 2.66675H2.66665C1.93331 2.66675 1.33998 3.26675 1.33998 4.00008L1.33331 12.0001C1.33331 12.7334 1.93331 13.3334 2.66665 13.3334H13.3333C14.0666 13.3334 14.6666 12.7334 14.6666 12.0001V4.00008C14.6666 3.26675 14.0666 2.66675 13.3333 2.66675ZM13.3333 12.0001H2.66665V5.33341L7.99998 8.66675L13.3333 5.33341V12.0001ZM7.99998 7.33341L2.66665 4.00008H13.3333L7.99998 7.33341Z" fill="#201A19"/>
</svg>


View file

@ -1,36 +1,62 @@
"""Class representing Nextcloud service."""
import base64
import subprocess
import psutil
from selfprivacy_api.services.service import Service, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData
import typing
from selfprivacy_api.jobs import Job, Jobs
from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
from selfprivacy_api.services.generic_size_counter import get_storage_usage
from selfprivacy_api.services.generic_status_getter import get_service_status
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.nextcloud.icon import NEXTCLOUD_ICON
class Nextcloud(Service):
"""Class representing Nextcloud service."""
def get_id(self) -> str:
@staticmethod
def get_id() -> str:
"""Return service id."""
return "nextcloud"
def get_display_name(self) -> str:
@staticmethod
def get_display_name() -> str:
"""Return service display name."""
return "Nextcloud"
def get_description(self) -> str:
@staticmethod
def get_description() -> str:
"""Return service description."""
return "Nextcloud is a cloud storage service that offers a web interface and a desktop client."
def get_svg_icon(self) -> str:
@staticmethod
def get_svg_icon() -> str:
"""Read SVG icon from file and return it as base64 encoded string."""
with open("selfprivacy_api/services/nextcloud/nextcloud.svg", "rb") as f:
return base64.b64encode(f.read()).decode("utf-8")
return base64.b64encode(NEXTCLOUD_ICON.encode("utf-8")).decode("utf-8")
def is_enabled(self) -> bool:
@staticmethod
def get_url() -> typing.Optional[str]:
"""Return service url."""
domain = get_domain()
return f"https://cloud.{domain}"
@staticmethod
def is_movable() -> bool:
return True
@staticmethod
def is_required() -> bool:
return False
@staticmethod
def is_enabled() -> bool:
with ReadUserData() as user_data:
return user_data.get("nextcloud", {}).get("enable", False)
def get_status(self) -> ServiceStatus:
@staticmethod
def get_status() -> ServiceStatus:
"""
Return Nextcloud status from systemd.
Use command return code to determine status.
@ -40,57 +66,106 @@ class Nextcloud(Service):
Return code 3 means service is stopped.
Return code 4 means service is off.
"""
service_status = subprocess.Popen(
["systemctl", "status", "phpfpm-nextcloud.service"]
)
service_status.communicate()[0]
if service_status.returncode == 0:
return ServiceStatus.RUNNING
elif service_status.returncode == 1 or service_status.returncode == 2:
return ServiceStatus.ERROR
elif service_status.returncode == 3:
return ServiceStatus.STOPPED
elif service_status.returncode == 4:
return ServiceStatus.OFF
else:
return ServiceStatus.DEGRADED
return get_service_status("phpfpm-nextcloud.service")
def enable(self):
@staticmethod
def enable():
"""Enable Nextcloud service."""
with WriteUserData() as user_data:
if "nextcloud" not in user_data:
user_data["nextcloud"] = {}
user_data["nextcloud"]["enable"] = True
def disable(self):
@staticmethod
def disable():
"""Disable Nextcloud service."""
with WriteUserData() as user_data:
if "nextcloud" not in user_data:
user_data["nextcloud"] = {}
user_data["nextcloud"]["enable"] = False
def stop(self):
@staticmethod
def stop():
"""Stop Nextcloud service."""
subprocess.Popen(["systemctl", "stop", "phpfpm-nextcloud.service"])
def start(self):
@staticmethod
def start():
"""Start Nextcloud service."""
subprocess.Popen(["systemctl", "start", "phpfpm-nextcloud.service"])
def restart(self):
@staticmethod
def restart():
"""Restart Nextcloud service."""
subprocess.Popen(["systemctl", "restart", "phpfpm-nextcloud.service"])
def get_configuration(self) -> dict:
@staticmethod
def get_configuration() -> dict:
"""Return Nextcloud configuration."""
return {}
def set_configuration(self, config_items):
@staticmethod
def set_configuration(config_items):
return super().set_configuration(config_items)
def get_logs(self):
@staticmethod
def get_logs():
"""Return Nextcloud logs."""
return ""
def get_storage_usage(self):
return psutil.disk_usage("/var/lib/nextcloud").used
@staticmethod
def get_storage_usage() -> int:
"""
Calculate the real storage usage of /var/lib/nextcloud and all subdirectories.
Calculate using pathlib.
Do not follow symlinks.
"""
return get_storage_usage("/var/lib/nextcloud")
@staticmethod
def get_location() -> str:
"""Get the name of disk where Nextcloud is installed."""
with ReadUserData() as user_data:
if user_data.get("useBinds", False):
return user_data.get("nextcloud", {}).get("location", "sda1")
else:
return "sda1"
@staticmethod
def get_dns_records() -> typing.List[ServiceDnsRecord]:
return [
ServiceDnsRecord(
type="A",
name="cloud",
content=network_utils.get_ip4(),
ttl=3600,
),
ServiceDnsRecord(
type="AAAA",
name="cloud",
content=network_utils.get_ip6(),
ttl=3600,
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
job = Jobs.get_instance().add(
type_id="services.nextcloud.move",
name="Move Nextcloud",
description=f"Moving Nextcloud to volume {volume.name}",
)
move_service(
self,
volume,
job,
[
FolderMoveNames(
name="nextcloud",
bind_location="/var/lib/nextcloud",
owner="nextcloud",
group="nextcloud",
),
],
"nextcloud",
)
return job
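A minimal usage sketch, illustrative and not part of this commit, assuming the class is importable as selfprivacy_api.services.nextcloud.Nextcloud; it only exercises the now-static interface introduced above.

# Illustrative only; the module path is an assumption, and get_url() depends
# on the domain read from /var/domain by get_domain().
from selfprivacy_api.services.nextcloud import Nextcloud

print(Nextcloud.get_id())        # "nextcloud"
print(Nextcloud.is_movable())    # True
for record in Nextcloud.get_dns_records():
    print(record.type, record.name, record.ttl)   # A/AAAA records named "cloud", ttl 3600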

View file

@ -0,0 +1,12 @@
NEXTCLOUD_ICON = """
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_51106_4974)">
<path d="M12.018 6.53699C9.518 6.53699 7.418 8.24899 6.777 10.552C6.217 9.31999 4.984 8.44699 3.552 8.44699C2.61116 8.45146 1.71014 8.82726 1.04495 9.49264C0.379754 10.158 0.00420727 11.0591 0 12C0.00420727 12.9408 0.379754 13.842 1.04495 14.5073C1.71014 15.1727 2.61116 15.5485 3.552 15.553C4.984 15.553 6.216 14.679 6.776 13.447C7.417 15.751 9.518 17.463 12.018 17.463C14.505 17.463 16.594 15.77 17.249 13.486C17.818 14.696 19.032 15.553 20.447 15.553C21.3881 15.549 22.2895 15.1734 22.955 14.508C23.6205 13.8425 23.9961 12.9411 24 12C23.9958 11.059 23.6201 10.1577 22.9547 9.49229C22.2893 8.82688 21.388 8.4512 20.447 8.44699C19.031 8.44699 17.817 9.30499 17.248 10.514C16.594 8.22999 14.505 6.53599 12.018 6.53699ZM12.018 8.62199C13.896 8.62199 15.396 10.122 15.396 12C15.396 13.878 13.896 15.378 12.018 15.378C11.5739 15.38 11.1338 15.2939 10.7231 15.1249C10.3124 14.9558 9.93931 14.707 9.62532 14.393C9.31132 14.0789 9.06267 13.7057 8.89373 13.295C8.72478 12.8842 8.63888 12.4441 8.641 12C8.641 10.122 10.141 8.62199 12.018 8.62199ZM3.552 10.532C4.374 10.532 5.019 11.177 5.019 12C5.019 12.823 4.375 13.467 3.552 13.468C3.35871 13.47 3.16696 13.4334 2.988 13.3603C2.80905 13.2872 2.64648 13.1792 2.50984 13.0424C2.3732 12.9057 2.26524 12.7431 2.19229 12.5641C2.11934 12.3851 2.08286 12.1933 2.085 12C2.085 11.177 2.729 10.533 3.552 10.533V10.532ZM20.447 10.532C21.27 10.532 21.915 11.177 21.915 12C21.915 12.823 21.27 13.468 20.447 13.468C20.2537 13.47 20.062 13.4334 19.883 13.3603C19.704 13.2872 19.5415 13.1792 19.4048 13.0424C19.2682 12.9057 19.1602 12.7431 19.0873 12.5641C19.0143 12.3851 18.9779 12.1933 18.98 12C18.98 11.177 19.624 10.533 20.447 10.533V10.532Z" fill="black"/>
</g>
<defs>
<clipPath id="clip0_51106_4974">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>
"""

View file

@ -0,0 +1,121 @@
"""Class representing ocserv service."""
import base64
import subprocess
import typing
from selfprivacy_api.jobs import Job, Jobs
from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
from selfprivacy_api.services.generic_size_counter import get_storage_usage
from selfprivacy_api.services.generic_status_getter import get_service_status
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData
from selfprivacy_api.utils.block_devices import BlockDevice
from selfprivacy_api.services.ocserv.icon import OCSERV_ICON
import selfprivacy_api.utils.network as network_utils
class Ocserv(Service):
"""Class representing ocserv service."""
@staticmethod
def get_id() -> str:
return "ocserv"
@staticmethod
def get_display_name() -> str:
return "OpenConnect VPN"
@staticmethod
def get_description() -> str:
return "OpenConnect VPN to connect your devices and access the internet."
@staticmethod
def get_svg_icon() -> str:
return base64.b64encode(OCSERV_ICON.encode("utf-8")).decode("utf-8")
@staticmethod
def get_url() -> typing.Optional[str]:
"""Return service url."""
return None
@staticmethod
def is_movable() -> bool:
return False
@staticmethod
def is_required() -> bool:
return False
@staticmethod
def is_enabled() -> bool:
with ReadUserData() as user_data:
return user_data.get("ocserv", {}).get("enable", False)
@staticmethod
def get_status() -> ServiceStatus:
return get_service_status("ocserv.service")
@staticmethod
def enable():
with WriteUserData() as user_data:
if "ocserv" not in user_data:
user_data["ocserv"] = {}
user_data["ocserv"]["enable"] = True
@staticmethod
def disable():
with WriteUserData() as user_data:
if "ocserv" not in user_data:
user_data["ocserv"] = {}
user_data["ocserv"]["enable"] = False
@staticmethod
def stop():
subprocess.run(["systemctl", "stop", "ocserv.service"])
@staticmethod
def start():
subprocess.run(["systemctl", "start", "ocserv.service"])
@staticmethod
def restart():
subprocess.run(["systemctl", "restart", "ocserv.service"])
@staticmethod
def get_configuration():
return {}
@staticmethod
def set_configuration(config_items):
return super().set_configuration(config_items)
@staticmethod
def get_logs():
return ""
@staticmethod
def get_location() -> str:
return "sda1"
@staticmethod
def get_dns_records() -> typing.List[ServiceDnsRecord]:
return [
ServiceDnsRecord(
type="A",
name="vpn",
content=network_utils.get_ip4(),
ttl=3600,
),
ServiceDnsRecord(
type="AAAA",
name="vpn",
content=network_utils.get_ip6(),
ttl=3600,
),
]
@staticmethod
def get_storage_usage() -> int:
return 0
def move_to_volume(self, volume: BlockDevice) -> Job:
raise NotImplementedError("ocserv service is not movable")
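A hedged sketch (module path assumed): ocserv declares itself non-movable, so callers are expected to check is_movable() before requesting a move; calling move_to_volume() anyway raises.

# Illustrative only; the module path and the placeholder argument are assumptions.
from selfprivacy_api.services.ocserv import Ocserv

assert Ocserv.is_movable() is False
try:
    Ocserv().move_to_volume(volume=None)   # placeholder, never used before the raise
except NotImplementedError as error:
    print(error)   # "ocserv service is not movable"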

View file

@ -0,0 +1,5 @@
OCSERV_ICON = """
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M12 1L3 5V11C3 16.55 6.84 21.74 12 23C17.16 21.74 21 16.55 21 11V5L12 1ZM12 11.99H19C18.47 16.11 15.72 19.78 12 20.93V12H5V6.3L12 3.19V11.99Z" fill="black"/>
</svg>
"""

View file

@ -0,0 +1,3 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M12 1L3 5V11C3 16.55 6.84 21.74 12 23C17.16 21.74 21 16.55 21 11V5L12 1ZM12 11.99H19C18.47 16.11 15.72 19.78 12 20.93V12H5V6.3L12 3.19V11.99Z" fill="black"/>
</svg>


View file

@ -0,0 +1,157 @@
"""Class representing Nextcloud service."""
import base64
import subprocess
import typing
from selfprivacy_api.jobs import Job, Jobs
from selfprivacy_api.services.generic_service_mover import FolderMoveNames, move_service
from selfprivacy_api.services.generic_size_counter import get_storage_usage
from selfprivacy_api.services.generic_status_getter import get_service_status
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain
from selfprivacy_api.utils.block_devices import BlockDevice
import selfprivacy_api.utils.network as network_utils
from selfprivacy_api.services.pleroma.icon import PLEROMA_ICON
class Pleroma(Service):
"""Class representing Pleroma service."""
@staticmethod
def get_id() -> str:
return "pleroma"
@staticmethod
def get_display_name() -> str:
return "Pleroma"
@staticmethod
def get_description() -> str:
return "Pleroma is a microblogging service that offers a web interface and a desktop client."
@staticmethod
def get_svg_icon() -> str:
return base64.b64encode(PLEROMA_ICON.encode("utf-8")).decode("utf-8")
@staticmethod
def get_url() -> typing.Optional[str]:
"""Return service url."""
domain = get_domain()
return f"https://social.{domain}"
@staticmethod
def is_movable() -> bool:
return True
@staticmethod
def is_required() -> bool:
return False
@staticmethod
def is_enabled() -> bool:
with ReadUserData() as user_data:
return user_data.get("pleroma", {}).get("enable", False)
@staticmethod
def get_status() -> ServiceStatus:
return get_service_status("pleroma.service")
@staticmethod
def enable():
with WriteUserData() as user_data:
if "pleroma" not in user_data:
user_data["pleroma"] = {}
user_data["pleroma"]["enable"] = True
@staticmethod
def disable():
with WriteUserData() as user_data:
if "pleroma" not in user_data:
user_data["pleroma"] = {}
user_data["pleroma"]["enable"] = False
@staticmethod
def stop():
subprocess.run(["systemctl", "stop", "pleroma.service"])
subprocess.run(["systemctl", "stop", "postgresql.service"])
@staticmethod
def start():
subprocess.run(["systemctl", "start", "pleroma.service"])
subprocess.run(["systemctl", "start", "postgresql.service"])
@staticmethod
def restart():
subprocess.run(["systemctl", "restart", "pleroma.service"])
subprocess.run(["systemctl", "restart", "postgresql.service"])
@staticmethod
def get_configuration():
return {}
@staticmethod
def set_configuration(config_items):
return super().set_configuration(config_items)
@staticmethod
def get_logs():
return ""
@staticmethod
def get_storage_usage() -> int:
storage_usage = 0
storage_usage += get_storage_usage("/var/lib/pleroma")
storage_usage += get_storage_usage("/var/lib/postgresql")
return storage_usage
@staticmethod
def get_location() -> str:
with ReadUserData() as user_data:
if user_data.get("useBinds", False):
return user_data.get("pleroma", {}).get("location", "sda1")
else:
return "sda1"
@staticmethod
def get_dns_records() -> typing.List[ServiceDnsRecord]:
return [
ServiceDnsRecord(
type="A",
name="social",
content=network_utils.get_ip4(),
ttl=3600,
),
ServiceDnsRecord(
type="AAAA",
name="social",
content=network_utils.get_ip6(),
ttl=3600,
),
]
def move_to_volume(self, volume: BlockDevice) -> Job:
job = Jobs.get_instance().add(
type_id="services.pleroma.move",
name="Move Pleroma",
description=f"Moving Pleroma to volume {volume.name}",
)
move_service(
self,
volume,
job,
[
FolderMoveNames(
name="pleroma",
bind_location="/var/lib/pleroma",
owner="pleroma",
group="pleroma",
),
FolderMoveNames(
name="postgresql",
bind_location="/var/lib/postgresql",
owner="postgres",
group="postgres",
),
],
"pleroma",
)
return job
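A hedged sketch of how a move could be requested, assuming BlockDevices behaves as shown later in this commit and that the returned Job exposes the name it was created with.

# Illustrative only; the device name "sdb" and the job attribute access are assumptions.
from selfprivacy_api.services.pleroma import Pleroma
from selfprivacy_api.utils.block_devices import BlockDevices

volume = BlockDevices().get_block_device("sdb")
if volume is not None:
    job = Pleroma().move_to_volume(volume)
    print(job.name)   # expected to be "Move Pleroma"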

View file

@ -0,0 +1,12 @@
PLEROMA_ICON = """
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_51106_4998)">
<path d="M6.35999 1.07076e-06C6.11451 -0.000261753 5.87139 0.0478616 5.64452 0.14162C5.41766 0.235378 5.21149 0.372932 5.03782 0.546418C4.86415 0.719904 4.72638 0.925919 4.63237 1.15269C4.53837 1.37945 4.48999 1.62252 4.48999 1.868V24H10.454V1.07076e-06H6.35999ZM13.473 1.07076e-06V12H17.641C18.1364 12 18.6115 11.8032 18.9619 11.4529C19.3122 11.1026 19.509 10.6274 19.509 10.132V1.07076e-06H13.473ZM13.473 18.036V24H17.641C18.1364 24 18.6115 23.8032 18.9619 23.4529C19.3122 23.1026 19.509 22.6274 19.509 22.132V18.036H13.473Z" fill="black"/>
</g>
<defs>
<clipPath id="clip0_51106_4998">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>
"""

View file

@ -0,0 +1,10 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_51106_4998)">
<path d="M6.35999 1.07076e-06C6.11451 -0.000261753 5.87139 0.0478616 5.64452 0.14162C5.41766 0.235378 5.21149 0.372932 5.03782 0.546418C4.86415 0.719904 4.72638 0.925919 4.63237 1.15269C4.53837 1.37945 4.48999 1.62252 4.48999 1.868V24H10.454V1.07076e-06H6.35999ZM13.473 1.07076e-06V12H17.641C18.1364 12 18.6115 11.8032 18.9619 11.4529C19.3122 11.1026 19.509 10.6274 19.509 10.132V1.07076e-06H13.473ZM13.473 18.036V24H17.641C18.1364 24 18.6115 23.8032 18.9619 23.4529C19.3122 23.1026 19.509 22.6274 19.509 22.132V18.036H13.473Z" fill="black"/>
</g>
<defs>
<clipPath id="clip0_51106_4998">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>


View file

@ -3,23 +3,30 @@ from abc import ABC, abstractmethod
from enum import Enum
import typing
from pydantic import BaseModel
from selfprivacy_api.jobs import Job
from selfprivacy_api.utils.block_devices import BlockDevice
class ServiceStatus(Enum):
"""Enum for service status"""
RUNNING = "RUNNING"
DEGRADED = "DEGRADED"
ERROR = "ERROR"
STOPPED = "STOPPED"
ACTIVE = "ACTIVE"
RELOADING = "RELOADING"
INACTIVE = "INACTIVE"
FAILED = "FAILED"
ACTIVATING = "ACTIVATING"
DEACTIVATING = "DEACTIVATING"
OFF = "OFF"
class ServiceDnsRecord:
class ServiceDnsRecord(BaseModel):
type: str
name: str
content: str
ttl: int
priority: typing.Optional[int]
priority: typing.Optional[int] = None
class Service(ABC):
@ -28,66 +35,106 @@ class Service(ABC):
can be installed, configured and used by a user.
"""
@staticmethod
@abstractmethod
def get_id(self) -> str:
def get_id() -> str:
pass
@staticmethod
@abstractmethod
def get_display_name() -> str:
pass
@staticmethod
@abstractmethod
def get_description() -> str:
pass
@staticmethod
@abstractmethod
def get_svg_icon() -> str:
pass
@staticmethod
@abstractmethod
def get_url() -> typing.Optional[str]:
pass
@staticmethod
@abstractmethod
def is_movable() -> bool:
pass
@staticmethod
@abstractmethod
def is_required() -> bool:
pass
@staticmethod
@abstractmethod
def is_enabled() -> bool:
pass
@staticmethod
@abstractmethod
def get_status() -> ServiceStatus:
pass
@staticmethod
@abstractmethod
def enable():
pass
@staticmethod
@abstractmethod
def disable():
pass
@staticmethod
@abstractmethod
def stop():
pass
@staticmethod
@abstractmethod
def start():
pass
@staticmethod
@abstractmethod
def restart():
pass
@staticmethod
@abstractmethod
def get_configuration():
pass
@staticmethod
@abstractmethod
def set_configuration(config_items):
pass
@staticmethod
@abstractmethod
def get_logs():
pass
@staticmethod
@abstractmethod
def get_storage_usage() -> int:
pass
@staticmethod
@abstractmethod
def get_dns_records() -> typing.List[ServiceDnsRecord]:
pass
@staticmethod
@abstractmethod
def get_location() -> str:
pass
@abstractmethod
def get_display_name(self) -> str:
pass
@abstractmethod
def get_description(self) -> str:
pass
@abstractmethod
def get_svg_icon(self) -> str:
pass
@abstractmethod
def is_enabled(self) -> bool:
pass
@abstractmethod
def get_status(self) -> ServiceStatus:
pass
@abstractmethod
def enable(self):
pass
@abstractmethod
def disable(self):
pass
@abstractmethod
def stop(self):
pass
@abstractmethod
def start(self):
pass
@abstractmethod
def restart(self):
pass
@abstractmethod
def get_configuration(self):
pass
@abstractmethod
def set_configuration(self, config_items):
pass
@abstractmethod
def get_logs(self):
pass
@abstractmethod
def get_storage_usage(self):
pass
@abstractmethod
def get_dns_records(self) -> typing.List[ServiceDnsRecord]:
def move_to_volume(self, volume: BlockDevice) -> Job:
pass
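A minimal sketch of a new service written against the refactored static interface; the DummyService name and every value in it are hypothetical and only mirror the abstract methods defined above.

# Hypothetical example subclass; all values are illustrative.
import typing
from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus


class DummyService(Service):
    @staticmethod
    def get_id() -> str:
        return "dummy"

    @staticmethod
    def get_display_name() -> str:
        return "Dummy"

    @staticmethod
    def get_status() -> ServiceStatus:
        return ServiceStatus.INACTIVE

    @staticmethod
    def get_dns_records() -> typing.List[ServiceDnsRecord]:
        # priority defaults to None on the pydantic model, so it can be omitted.
        return [ServiceDnsRecord(type="A", name="dummy", content="203.0.113.1", ttl=3600)]

    # ...the remaining abstract methods would be implemented the same way.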

View file

@ -0,0 +1,4 @@
from selfprivacy_api.utils.huey import huey
from selfprivacy_api.jobs.test import test_job
from selfprivacy_api.restic_controller.tasks import *
from selfprivacy_api.services.generic_service_mover import move_service

View file

@ -10,6 +10,7 @@ import portalocker
USERDATA_FILE = "/etc/nixos/userdata/userdata.json"
TOKENS_FILE = "/etc/nixos/userdata/tokens.json"
JOBS_FILE = "/etc/nixos/userdata/jobs.json"
DOMAIN_FILE = "/var/domain"
@ -18,6 +19,7 @@ class UserDataFiles(Enum):
USERDATA = 0
TOKENS = 1
JOBS = 2
def get_domain():
@ -35,6 +37,12 @@ class WriteUserData(object):
self.userdata_file = open(USERDATA_FILE, "r+", encoding="utf-8")
elif file_type == UserDataFiles.TOKENS:
self.userdata_file = open(TOKENS_FILE, "r+", encoding="utf-8")
elif file_type == UserDataFiles.JOBS:
# Make sure file exists
if not os.path.exists(JOBS_FILE):
with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file:
jobs_file.write("{}")
self.userdata_file = open(JOBS_FILE, "r+", encoding="utf-8")
else:
raise ValueError("Unknown file type")
portalocker.lock(self.userdata_file, portalocker.LOCK_EX)
@ -60,6 +68,12 @@ class ReadUserData(object):
self.userdata_file = open(USERDATA_FILE, "r", encoding="utf-8")
elif file_type == UserDataFiles.TOKENS:
self.userdata_file = open(TOKENS_FILE, "r", encoding="utf-8")
elif file_type == UserDataFiles.JOBS:
# Make sure file exists
if not os.path.exists(JOBS_FILE):
with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file:
jobs_file.write("{}")
self.userdata_file = open(JOBS_FILE, "r", encoding="utf-8")
else:
raise ValueError("Unknown file type")
portalocker.lock(self.userdata_file, portalocker.LOCK_SH)
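An illustrative sketch of the new JOBS file access (the "example" payload is a hypothetical placeholder): the file is auto-created with "{}" on first use, and the context managers yield the parsed JSON dict just as they do for the other user-data files.

# Illustrative only; the stored payload is a made-up placeholder.
from selfprivacy_api.utils import ReadUserData, WriteUserData, UserDataFiles

with WriteUserData(UserDataFiles.JOBS) as jobs:
    jobs["example"] = {"status": "CREATED"}

with ReadUserData(UserDataFiles.JOBS) as jobs:
    print(jobs.get("example"))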

View file

@ -5,6 +5,7 @@ from datetime import datetime, timedelta
import re
import typing
from pydantic import BaseModel
from mnemonic import Mnemonic
from . import ReadUserData, UserDataFiles, WriteUserData, parse_date
@ -87,7 +88,7 @@ def is_token_name_pair_valid(token_name, token):
return False
def get_token_name(token):
def get_token_name(token: str) -> typing.Optional[str]:
"""Return the name of the token provided"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
for t in tokens["tokens"]:
@ -96,11 +97,22 @@ def get_token_name(token):
return None
class BasicTokenInfo(BaseModel):
"""Token info"""
name: str
date: datetime
def get_tokens_info():
"""Get all tokens info without tokens themselves"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
return [
{"name": token["name"], "date": token["date"]} for token in tokens["tokens"]
BasicTokenInfo(
name=t["name"],
date=parse_date(t["date"]),
)
for t in tokens["tokens"]
]
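A small usage sketch, not part of the diff: get_tokens_info() now returns pydantic models rather than raw dicts, so callers read fields as attributes.

# Illustrative only.
from selfprivacy_api.utils.auth import get_tokens_info

for info in get_tokens_info():
    print(info.name, info.date.isoformat())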

View file

@ -16,13 +16,13 @@ def get_block_device(device_name):
"-J",
"-b",
"-o",
"NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE, MODEL,SERIAL,TYPE",
device_name,
"NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE",
f"/dev/{device_name}",
]
)
lsblk_output = lsblk_output.decode("utf-8")
lsblk_output = json.loads(lsblk_output)
return lsblk_output["blockdevices"]
return lsblk_output["blockdevices"][0]
def resize_block_device(block_device) -> bool:
@ -30,9 +30,11 @@ def resize_block_device(block_device) -> bool:
Resize a block device. Return True if successful.
"""
resize_command = ["resize2fs", block_device]
resize_process = subprocess.Popen(resize_command, shell=False)
resize_process.communicate()
return resize_process.returncode == 0
try:
subprocess.check_output(resize_command, shell=False)
except subprocess.CalledProcessError:
return False
return True
class BlockDevice:
@ -43,14 +45,14 @@ class BlockDevice:
def __init__(self, block_device):
self.name = block_device["name"]
self.path = block_device["path"]
self.fsavail = block_device["fsavail"]
self.fssize = block_device["fssize"]
self.fsavail = str(block_device["fsavail"])
self.fssize = str(block_device["fssize"])
self.fstype = block_device["fstype"]
self.fsused = block_device["fsused"]
self.mountpoint = block_device["mountpoint"]
self.fsused = str(block_device["fsused"])
self.mountpoints = block_device["mountpoints"]
self.label = block_device["label"]
self.uuid = block_device["uuid"]
self.size = block_device["size"]
self.size = str(block_device["size"])
self.model = block_device["model"]
self.serial = block_device["serial"]
self.type = block_device["type"]
@ -60,7 +62,7 @@ class BlockDevice:
return self.name
def __repr__(self):
return f"<BlockDevice {self.name} of size {self.size} mounted at {self.mountpoint}>"
return f"<BlockDevice {self.name} of size {self.size} mounted at {self.mountpoints}>"
def __eq__(self, other):
return self.name == other.name
@ -73,14 +75,14 @@ class BlockDevice:
Update current data and return a dictionary of stats.
"""
device = get_block_device(self.name)
self.fsavail = device["fsavail"]
self.fssize = device["fssize"]
self.fsavail = str(device["fsavail"])
self.fssize = str(device["fssize"])
self.fstype = device["fstype"]
self.fsused = device["fsused"]
self.mountpoint = device["mountpoint"]
self.fsused = str(device["fsused"])
self.mountpoints = device["mountpoints"]
self.label = device["label"]
self.uuid = device["uuid"]
self.size = device["size"]
self.size = str(device["size"])
self.model = device["model"]
self.serial = device["serial"]
self.type = device["type"]
@ -92,7 +94,7 @@ class BlockDevice:
"fssize": self.fssize,
"fstype": self.fstype,
"fsused": self.fsused,
"mountpoint": self.mountpoint,
"mountpoints": self.mountpoints,
"label": self.label,
"uuid": self.uuid,
"size": self.size,
@ -170,7 +172,7 @@ class BlockDevices:
"-J",
"-b",
"-o",
"NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINT,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE",
"NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE",
]
)
lsblk_output = lsblk_output.decode("utf-8")
@ -219,6 +221,6 @@ class BlockDevices:
"""
block_devices = []
for block_device in self.block_devices:
if block_device.mountpoint == mountpoint:
if mountpoint in block_device.mountpoints:
block_devices.append(block_device)
return block_devices
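A hedged usage sketch: get_block_device() now queries a single device (/dev/<name>) and returns the first "blockdevices" entry, which BlockDevice consumes directly.

# Illustrative only; requires lsblk on the host and an existing sda1 device.
from selfprivacy_api.utils.block_devices import BlockDevice, get_block_device

device = BlockDevice(get_block_device("sda1"))
print(device.path, device.mountpoints, device.fssize)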

View file

@ -0,0 +1,14 @@
"""MiniHuey singleton."""
import os
from huey import SqliteHuey
HUEY_DATABASE = "/etc/nixos/userdata/tasks.db"
# Singleton instance containing the huey database.
test_mode = os.environ.get("TEST_MODE")
huey = SqliteHuey(
HUEY_DATABASE,
immediate=test_mode == "true",
)
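A hedged sketch of registering a task against this shared instance (the add task is hypothetical): with TEST_MODE=true, immediate=True runs tasks synchronously, which is what the test suite relies on.

# Hypothetical task for illustration; not part of the commit.
from selfprivacy_api.utils.huey import huey

@huey.task()
def add(first: int, second: int) -> int:
    return first + second

result = add(2, 3)   # returns a huey Result handle
print(result())      # 5 when running in immediate mode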

View file

@ -2,9 +2,10 @@
"""Network utils"""
import subprocess
import re
from typing import Optional
def get_ip4():
def get_ip4() -> str:
"""Get IPv4 address"""
try:
ip4 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode(
@ -13,10 +14,10 @@ def get_ip4():
ip4 = re.search(r"inet (\d+\.\d+\.\d+\.\d+)\/\d+", ip4)
except subprocess.CalledProcessError:
ip4 = None
return ip4.group(1) if ip4 else None
return ip4.group(1) if ip4 else ""
def get_ip6():
def get_ip6() -> str:
"""Get IPv6 address"""
try:
ip6 = subprocess.check_output(["ip", "addr", "show", "dev", "eth0"]).decode(
@ -25,4 +26,4 @@ def get_ip6():
ip6 = re.search(r"inet6 (\S+)\/\d+", ip6)
except subprocess.CalledProcessError:
ip6 = None
return ip6.group(1) if ip6 else None
return ip6.group(1) if ip6 else ""
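A quick illustration of the IPv4 regex above on a sample `ip addr` line (the address is a documentation example, not real command output).

# Sample line for illustration only.
import re

sample = "    inet 203.0.113.7/24 brd 203.0.113.255 scope global eth0"
match = re.search(r"inet (\d+\.\d+\.\d+\.\d+)\/\d+", sample)
print(match.group(1) if match else "")   # -> 203.0.113.7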

View file

@ -2,7 +2,7 @@ from setuptools import setup, find_packages
setup(
name="selfprivacy_api",
version="1.2.7",
version="2.0.0",
packages=find_packages(),
scripts=[
"selfprivacy_api/app.py",

View file

@ -1,12 +1,8 @@
{ pkgs ? import <nixpkgs> { } }:
let
sp-python = pkgs.python39.withPackages (p: with p; [
flask
flask-restful
setuptools
portalocker
flask-swagger
flask-swagger-ui
pytz
pytest
pytest-mock
@ -18,9 +14,10 @@ let
pylint
pydantic
typing-extensions
flask-cors
psutil
black
fastapi
uvicorn
(buildPythonPackage rec {
pname = "strawberry-graphql";
version = "0.123.0";
@ -32,11 +29,11 @@ let
typing-extensions
python-multipart
python-dateutil
flask
# flask
pydantic
pygments
poetry
flask-cors
# flask-cors
(buildPythonPackage rec {
pname = "graphql-core";
version = "3.2.0";

View file

@ -1,9 +1,13 @@
"""Tests configuration."""
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
import os
import pytest
from flask import testing
from selfprivacy_api.app import create_app
from fastapi.testclient import TestClient
def pytest_generate_tests(metafunc):
os.environ["TEST_MODE"] = "true"
@pytest.fixture
@ -16,66 +20,43 @@ def tokens_file(mocker, shared_datadir):
@pytest.fixture
def app():
"""Flask application."""
app = create_app(
{
"ENABLE_SWAGGER": "1",
}
def jobs_file(mocker, shared_datadir):
"""Mock tokens file."""
mock = mocker.patch("selfprivacy_api.utils.JOBS_FILE", shared_datadir / "jobs.json")
return mock
@pytest.fixture
def huey_database(mocker, shared_datadir):
"""Mock huey database."""
mock = mocker.patch(
"selfprivacy_api.utils.huey.HUEY_DATABASE", shared_datadir / "huey.db"
)
yield app
return mock
@pytest.fixture
def client(app, tokens_file):
"""Flask unauthorized test client."""
return app.test_client()
def client(tokens_file, huey_database, jobs_file):
from selfprivacy_api.app import app
class AuthorizedClient(testing.FlaskClient):
"""Flask authorized test client."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.token = "TEST_TOKEN"
def open(self, *args, **kwargs):
if "headers" not in kwargs:
kwargs["headers"] = {}
kwargs["headers"]["Authorization"] = f"Bearer {self.token}"
return super().open(*args, **kwargs)
class WrongAuthClient(testing.FlaskClient):
"""Flask client with wrong token"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.token = "WRONG_TOKEN"
def open(self, *args, **kwargs):
if "headers" not in kwargs:
kwargs["headers"] = {}
kwargs["headers"]["Authorization"] = f"Bearer {self.token}"
return super().open(*args, **kwargs)
return TestClient(app)
@pytest.fixture
def authorized_client(app, tokens_file):
def authorized_client(tokens_file, huey_database, jobs_file):
"""Authorized test client fixture."""
app.test_client_class = AuthorizedClient
return app.test_client()
from selfprivacy_api.app import app
client = TestClient(app)
client.headers.update({"Authorization": "Bearer TEST_TOKEN"})
return client
@pytest.fixture
def wrong_auth_client(app, tokens_file):
def wrong_auth_client(tokens_file, huey_database, jobs_file):
"""Wrong token test client fixture."""
app.test_client_class = WrongAuthClient
return app.test_client()
from selfprivacy_api.app import app
@pytest.fixture
def runner(app, tokens_file):
"""Flask test runner."""
return app.test_cli_runner()
client = TestClient(app)
client.headers.update({"Authorization": "Bearer WRONG_TOKEN"})
return client
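A minimal sketch of how these fixtures would be consumed in a test; the request path and expected status are assumptions for illustration, while the header assertion follows directly from the fixture above.

# The Authorization header is set by the fixture; the endpoint path is hypothetical.
def test_authorized_client_sends_token(authorized_client):
    assert authorized_client.headers["Authorization"] == "Bearer TEST_TOKEN"
    response = authorized_client.get("/api/version")   # hypothetical endpoint
    assert response.status_code != 401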

1
tests/data/jobs.json Normal file
View file

@ -0,0 +1 @@
{}

View file

@ -0,0 +1,490 @@
#!/usr/bin/env python3
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=missing-function-docstring
import json
import subprocess
import pytest
from selfprivacy_api.utils.block_devices import (
BlockDevice,
BlockDevices,
get_block_device,
resize_block_device,
)
from tests.common import read_json
SINGLE_LSBLK_OUTPUT = b"""
{
"blockdevices": [
{
"name": "sda1",
"path": "/dev/sda1",
"fsavail": "4614107136",
"fssize": "19814920192",
"fstype": "ext4",
"fsused": "14345314304",
"mountpoints": [
"/nix/store", "/"
],
"label": null,
"uuid": "ec80c004-baec-4a2c-851d-0e1807135511",
"size": 20210236928,
"model": null,
"serial": null,
"type": "part"
}
]
}
"""
@pytest.fixture
def lsblk_singular_mock(mocker):
mock = mocker.patch(
"subprocess.check_output", autospec=True, return_value=SINGLE_LSBLK_OUTPUT
)
return mock
@pytest.fixture
def failed_check_output_mock(mocker):
mock = mocker.patch(
"subprocess.check_output",
autospec=True,
side_effect=subprocess.CalledProcessError(
returncode=1, cmd=["some", "command"]
),
)
return mock
@pytest.fixture
def only_root_in_userdata(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "only_root.json")
assert read_json(datadir / "only_root.json")["volumes"][0]["device"] == "/dev/sda1"
assert (
read_json(datadir / "only_root.json")["volumes"][0]["mountPoint"]
== "/volumes/sda1"
)
assert read_json(datadir / "only_root.json")["volumes"][0]["filesystem"] == "ext4"
return datadir
@pytest.fixture
def no_devices_in_userdata(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_devices.json")
assert read_json(datadir / "no_devices.json")["volumes"] == []
return datadir
@pytest.fixture
def undefined_devices_in_userdata(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json")
assert "volumes" not in read_json(datadir / "undefined.json")
return datadir
def test_create_block_device_object(lsblk_singular_mock, authorized_client):
output = get_block_device("sda1")
assert lsblk_singular_mock.call_count == 1
assert lsblk_singular_mock.call_args[0][0] == [
"lsblk",
"-J",
"-b",
"-o",
"NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE",
"/dev/sda1",
]
assert output == json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0]
def test_resize_block_device(lsblk_singular_mock, authorized_client):
result = resize_block_device("sdb")
assert result is True
assert lsblk_singular_mock.call_count == 1
assert lsblk_singular_mock.call_args[0][0] == [
"resize2fs",
"sdb",
]
def test_resize_block_device_failed(failed_check_output_mock, authorized_client):
result = resize_block_device("sdb")
assert result is False
assert failed_check_output_mock.call_count == 1
assert failed_check_output_mock.call_args[0][0] == [
"resize2fs",
"sdb",
]
VOLUME_LSBLK_OUTPUT = b"""
{
"blockdevices": [
{
"name": "sdb",
"path": "/dev/sdb",
"fsavail": "11888545792",
"fssize": "12573614080",
"fstype": "ext4",
"fsused": "24047616",
"mountpoints": [
"/volumes/sdb"
],
"label": null,
"uuid": "fa9d0026-ee23-4047-b8b1-297ae16fa751",
"size": 12884901888,
"model": "Volume",
"serial": "21378102",
"type": "disk"
}
]
}
"""
def test_create_block_device(lsblk_singular_mock, authorized_client):
block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0])
assert block_device.name == "sdb"
assert block_device.path == "/dev/sdb"
assert block_device.fsavail == "11888545792"
assert block_device.fssize == "12573614080"
assert block_device.fstype == "ext4"
assert block_device.fsused == "24047616"
assert block_device.mountpoints == ["/volumes/sdb"]
assert block_device.label is None
assert block_device.uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751"
assert block_device.size == "12884901888"
assert block_device.model == "Volume"
assert block_device.serial == "21378102"
assert block_device.type == "disk"
assert block_device.locked is False
assert str(block_device) == "sdb"
assert (
repr(block_device)
== "<BlockDevice sdb of size 12884901888 mounted at ['/volumes/sdb']>"
)
assert hash(block_device) == hash("sdb")
def test_block_devices_equal(lsblk_singular_mock, authorized_client):
block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0])
block_device2 = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0])
assert block_device == block_device2
@pytest.fixture
def resize_block_mock(mocker):
mock = mocker.patch(
"selfprivacy_api.utils.block_devices.resize_block_device",
autospec=True,
return_value=True,
)
return mock
def test_call_resize_from_block_device(
lsblk_singular_mock, resize_block_mock, authorized_client
):
block_device = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0])
block_device.resize()
assert resize_block_mock.call_count == 1
assert resize_block_mock.call_args[0][0] == "/dev/sdb"
assert lsblk_singular_mock.call_count == 0
def test_get_stats_from_block_device(lsblk_singular_mock, authorized_client):
block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0])
stats = block_device.stats()
assert stats == {
"name": "sda1",
"path": "/dev/sda1",
"fsavail": "4614107136",
"fssize": "19814920192",
"fstype": "ext4",
"fsused": "14345314304",
"mountpoints": ["/nix/store", "/"],
"label": None,
"uuid": "ec80c004-baec-4a2c-851d-0e1807135511",
"size": "20210236928",
"model": None,
"serial": None,
"type": "part",
}
assert lsblk_singular_mock.call_count == 1
assert lsblk_singular_mock.call_args[0][0] == [
"lsblk",
"-J",
"-b",
"-o",
"NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE",
"/dev/sda1",
]
def test_mount_block_device(
lsblk_singular_mock, only_root_in_userdata, authorized_client
):
block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0])
result = block_device.mount()
assert result is False
volume = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0])
result = volume.mount()
assert result is True
assert (
read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["device"]
== "/dev/sdb"
)
assert (
read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["mountPoint"]
== "/volumes/sdb"
)
assert (
read_json(only_root_in_userdata / "only_root.json")["volumes"][1]["fsType"]
== "ext4"
)
def test_mount_block_device_when_undefined(
lsblk_singular_mock, undefined_devices_in_userdata, authorized_client
):
block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0])
result = block_device.mount()
assert result is True
assert (
read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][
"device"
]
== "/dev/sda1"
)
assert (
read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][
"mountPoint"
]
== "/volumes/sda1"
)
assert (
read_json(undefined_devices_in_userdata / "undefined.json")["volumes"][0][
"fsType"
]
== "ext4"
)
def test_unmount_block_device(
lsblk_singular_mock, only_root_in_userdata, authorized_client
):
block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0])
result = block_device.unmount()
assert result is True
volume = BlockDevice(json.loads(VOLUME_LSBLK_OUTPUT)["blockdevices"][0])
result = volume.unmount()
assert result is False
assert len(read_json(only_root_in_userdata / "only_root.json")["volumes"]) == 0
def test_unmount_block_device_when_undefined(
lsblk_singular_mock, undefined_devices_in_userdata, authorized_client
):
block_device = BlockDevice(json.loads(SINGLE_LSBLK_OUTPUT)["blockdevices"][0])
result = block_device.unmount()
assert result is False
assert (
len(read_json(undefined_devices_in_userdata / "undefined.json")["volumes"]) == 0
)
FULL_LSBLK_OUTPUT = b"""
{
"blockdevices": [
{
"name": "sda",
"path": "/dev/sda",
"fsavail": null,
"fssize": null,
"fstype": null,
"fsused": null,
"mountpoints": [
null
],
"label": null,
"uuid": null,
"size": 20480786432,
"model": "QEMU HARDDISK",
"serial": "drive-scsi0-0-0-0",
"type": "disk",
"children": [
{
"name": "sda1",
"path": "/dev/sda1",
"fsavail": "4605702144",
"fssize": "19814920192",
"fstype": "ext4",
"fsused": "14353719296",
"mountpoints": [
"/nix/store", "/"
],
"label": null,
"uuid": "ec80c004-baec-4a2c-851d-0e1807135511",
"size": 20210236928,
"model": null,
"serial": null,
"type": "part"
},{
"name": "sda14",
"path": "/dev/sda14",
"fsavail": null,
"fssize": null,
"fstype": null,
"fsused": null,
"mountpoints": [
null
],
"label": null,
"uuid": null,
"size": 1048576,
"model": null,
"serial": null,
"type": "part"
},{
"name": "sda15",
"path": "/dev/sda15",
"fsavail": null,
"fssize": null,
"fstype": "vfat",
"fsused": null,
"mountpoints": [
null
],
"label": null,
"uuid": "6B29-5BA7",
"size": 268435456,
"model": null,
"serial": null,
"type": "part"
}
]
},{
"name": "sdb",
"path": "/dev/sdb",
"fsavail": "11888545792",
"fssize": "12573614080",
"fstype": "ext4",
"fsused": "24047616",
"mountpoints": [
"/volumes/sdb"
],
"label": null,
"uuid": "fa9d0026-ee23-4047-b8b1-297ae16fa751",
"size": 12884901888,
"model": "Volume",
"serial": "21378102",
"type": "disk"
},{
"name": "sr0",
"path": "/dev/sr0",
"fsavail": null,
"fssize": null,
"fstype": null,
"fsused": null,
"mountpoints": [
null
],
"label": null,
"uuid": null,
"size": 1073741312,
"model": "QEMU DVD-ROM",
"serial": "QM00003",
"type": "rom"
}
]
}
"""
@pytest.fixture
def lsblk_full_mock(mocker):
mock = mocker.patch(
"subprocess.check_output", autospec=True, return_value=FULL_LSBLK_OUTPUT
)
return mock
def test_get_block_devices(lsblk_full_mock, authorized_client):
block_devices = BlockDevices().get_block_devices()
assert len(block_devices) == 2
assert block_devices[0].name == "sda1"
assert block_devices[0].path == "/dev/sda1"
assert block_devices[0].fsavail == "4605702144"
assert block_devices[0].fssize == "19814920192"
assert block_devices[0].fstype == "ext4"
assert block_devices[0].fsused == "14353719296"
assert block_devices[0].mountpoints == ["/nix/store", "/"]
assert block_devices[0].label is None
assert block_devices[0].uuid == "ec80c004-baec-4a2c-851d-0e1807135511"
assert block_devices[0].size == "20210236928"
assert block_devices[0].model is None
assert block_devices[0].serial is None
assert block_devices[0].type == "part"
assert block_devices[1].name == "sdb"
assert block_devices[1].path == "/dev/sdb"
assert block_devices[1].fsavail == "11888545792"
assert block_devices[1].fssize == "12573614080"
assert block_devices[1].fstype == "ext4"
assert block_devices[1].fsused == "24047616"
assert block_devices[1].mountpoints == ["/volumes/sdb"]
assert block_devices[1].label is None
assert block_devices[1].uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751"
assert block_devices[1].size == "12884901888"
assert block_devices[1].model == "Volume"
assert block_devices[1].serial == "21378102"
assert block_devices[1].type == "disk"
def test_get_block_device(lsblk_full_mock, authorized_client):
block_device = BlockDevices().get_block_device("sda1")
assert block_device is not None
assert block_device.name == "sda1"
assert block_device.path == "/dev/sda1"
assert block_device.fsavail == "4605702144"
assert block_device.fssize == "19814920192"
assert block_device.fstype == "ext4"
assert block_device.fsused == "14353719296"
assert block_device.mountpoints == ["/nix/store", "/"]
assert block_device.label is None
assert block_device.uuid == "ec80c004-baec-4a2c-851d-0e1807135511"
assert block_device.size == "20210236928"
assert block_device.model is None
assert block_device.serial is None
assert block_device.type == "part"
def test_get_nonexistent_block_device(lsblk_full_mock, authorized_client):
block_device = BlockDevices().get_block_device("sda2")
assert block_device is None
def test_get_block_devices_by_mountpoint(lsblk_full_mock, authorized_client):
block_devices = BlockDevices().get_block_devices_by_mountpoint("/nix/store")
assert len(block_devices) == 1
assert block_devices[0].name == "sda1"
assert block_devices[0].path == "/dev/sda1"
assert block_devices[0].fsavail == "4605702144"
assert block_devices[0].fssize == "19814920192"
assert block_devices[0].fstype == "ext4"
assert block_devices[0].fsused == "14353719296"
assert block_devices[0].mountpoints == ["/nix/store", "/"]
assert block_devices[0].label is None
assert block_devices[0].uuid == "ec80c004-baec-4a2c-851d-0e1807135511"
assert block_devices[0].size == "20210236928"
assert block_devices[0].model is None
assert block_devices[0].serial is None
assert block_devices[0].type == "part"
def test_get_block_devices_by_mountpoint_no_match(lsblk_full_mock, authorized_client):
block_devices = BlockDevices().get_block_devices_by_mountpoint("/foo")
assert len(block_devices) == 0

View file

@ -0,0 +1,54 @@
{
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": true
},
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"volumes": [
]
}

View file

@ -0,0 +1,59 @@
{
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": true
},
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"volumes": [
{
"device": "/dev/sda1",
"mountPoint": "/volumes/sda1",
"filesystem": "ext4"
}
]
}

Some files were not shown because too many files have changed in this diff.