Merge pull request 'Dismantle REST API' (#55) from remove-rest into master

Reviewed-on: https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api/pulls/55
Reviewed-by: Inex Code <inex.code@selfprivacy.org>
Inex Code 2024-01-09 20:58:51 +02:00
commit 6b4920a0e7
114 changed files with 2635 additions and 6372 deletions

View file

@@ -2,7 +2,7 @@
 import os
 import subprocess
 import pytz
-from typing import Optional
+from typing import Optional, List
 from pydantic import BaseModel
 from selfprivacy_api.utils import WriteUserData, ReadUserData
@@ -58,36 +58,56 @@ def set_auto_upgrade_settings(
         user_data["autoUpgrade"]["allowReboot"] = allowReboot


+class ShellException(Exception):
+    """Something went wrong when calling another process"""
+
+    pass
+
+
+def run_blocking(cmd: List[str], new_session: bool = False) -> str:
+    """Run a process, block until done, return output, complain if failed"""
+    process_handle = subprocess.Popen(
+        cmd,
+        shell=False,
+        start_new_session=new_session,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+    )
+    stdout_raw, stderr_raw = process_handle.communicate()
+    stdout = stdout_raw.decode("utf-8")
+    if stderr_raw is not None:
+        stderr = stderr_raw.decode("utf-8")
+    else:
+        stderr = ""
+    output = stdout + "\n" + stderr
+    if process_handle.returncode != 0:
+        raise ShellException(
+            f"Shell command failed, command array: {cmd}, output: {output}"
+        )
+    return stdout
+
+
 def rebuild_system() -> int:
     """Rebuild the system"""
-    rebuild_result = subprocess.Popen(
-        ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True
-    )
-    rebuild_result.communicate()[0]
-    return rebuild_result.returncode
+    run_blocking(["systemctl", "start", "sp-nixos-rebuild.service"], new_session=True)
+    return 0


 def rollback_system() -> int:
     """Rollback the system"""
-    rollback_result = subprocess.Popen(
-        ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True
-    )
-    rollback_result.communicate()[0]
-    return rollback_result.returncode
+    run_blocking(["systemctl", "start", "sp-nixos-rollback.service"], new_session=True)
+    return 0


 def upgrade_system() -> int:
     """Upgrade the system"""
-    upgrade_result = subprocess.Popen(
-        ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True
-    )
-    upgrade_result.communicate()[0]
-    return upgrade_result.returncode
+    run_blocking(["systemctl", "start", "sp-nixos-upgrade.service"], new_session=True)
+    return 0


 def reboot_system() -> None:
     """Reboot the system"""
-    subprocess.Popen(["reboot"], start_new_session=True)
+    run_blocking(["reboot"], new_session=True)


 def get_system_version() -> str:
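The refactor above replaces fire-and-forget subprocess.Popen calls with one blocking helper, so a failed systemctl invocation now surfaces as ShellException instead of being silently ignored. A minimal usage sketch, assuming the module is importable; the echo/false commands are only illustrative and not part of the commit:

from selfprivacy_api.actions.system import run_blocking, ShellException

# A successful command returns its captured stdout.
print(run_blocking(["echo", "hello"]))

# A non-zero exit code now raises instead of being dropped on the floor.
try:
    run_blocking(["false"])
except ShellException as error:
    print(f"command failed: {error}")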

View file

@@ -58,7 +58,7 @@ def get_users(
         )
         for user in user_data["users"]
     ]
-    if not exclude_primary:
+    if not exclude_primary and "username" in user_data.keys():
         users.append(
             UserDataUser(
                 username=user_data["username"],
@@ -107,6 +107,12 @@ class PasswordIsEmpty(Exception):
     pass


+class InvalidConfiguration(Exception):
+    """The userdata is broken"""
+
+    pass
+
+
 def create_user(username: str, password: str):
     if password == "":
         raise PasswordIsEmpty("Password is empty")
@@ -124,6 +130,10 @@ def create_user(username: str, password: str):
     with ReadUserData() as user_data:
         ensure_ssh_and_users_fields_exist(user_data)
+        if "username" not in user_data.keys():
+            raise InvalidConfiguration(
+                "Broken config: Admin name is not defined. Consider recovery or add it manually"
+            )
         if username == user_data["username"]:
             raise UserAlreadyExists("User already exists")
         if username in [user["username"] for user in user_data["users"]]:

View file

@@ -10,12 +10,6 @@ from selfprivacy_api.dependencies import get_api_version
 from selfprivacy_api.graphql.schema import schema
 from selfprivacy_api.migrations import run_migrations

-from selfprivacy_api.rest import (
-    system,
-    users,
-    api_auth,
-    services,
-)

 app = FastAPI()
@@ -32,10 +26,6 @@ app.add_middleware(
 )

-app.include_router(system.router)
-app.include_router(users.router)
-app.include_router(api_auth.router)
-app.include_router(services.router)
 app.include_router(graphql_app, prefix="/graphql")

View file

@@ -197,6 +197,8 @@ class ResticBackupper(AbstractBackupper):
                 output,
                 "parsed messages:",
                 messages,
+                "command: ",
+                backup_command,
             ) from error

     @staticmethod

View file

@@ -11,7 +11,9 @@ from selfprivacy_api.graphql.common_types.backup import (
 from selfprivacy_api.models.backup.snapshot import Snapshot
 from selfprivacy_api.utils.huey import huey
 from huey import crontab
 from selfprivacy_api.services.service import Service
+from selfprivacy_api.services import get_service_by_id
 from selfprivacy_api.backup import Backups
 from selfprivacy_api.jobs import Jobs, JobStatus, Job
@@ -31,12 +33,13 @@ def validate_datetime(dt: datetime) -> bool:
 # huey tasks need to return something
 @huey.task()
-def start_backup(
-    service: Service, reason: BackupReason = BackupReason.EXPLICIT
-) -> bool:
+def start_backup(service_id: str, reason: BackupReason = BackupReason.EXPLICIT) -> bool:
     """
     The worker task that starts the backup process.
     """
+    service = get_service_by_id(service_id)
+    if service is None:
+        raise ValueError(f"No such service: {service_id}")
     Backups.back_up(service, reason)
     return True

View file

@@ -27,4 +27,4 @@ async def get_token_header(
 def get_api_version() -> str:
     """Get API version"""
-    return "2.4.3"
+    return "3.0.0"

View file

@@ -148,7 +148,7 @@ class BackupMutations:
         )
         job = add_backup_job(service)
-        start_backup(service)
+        start_backup(service_id)
         return GenericJobMutationReturn(
             success=True,

View file

@@ -4,6 +4,7 @@ import typing
 import strawberry
 from selfprivacy_api.graphql import IsAuthenticated
 from selfprivacy_api.graphql.common_types.jobs import job_to_api_job
+from selfprivacy_api.jobs import JobStatus

 from selfprivacy_api.graphql.common_types.service import (
     Service,
@@ -47,6 +48,7 @@ class ServicesMutations:
     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def enable_service(self, service_id: str) -> ServiceMutationReturn:
         """Enable service."""
+        try:
             service = get_service_by_id(service_id)
             if service is None:
                 return ServiceMutationReturn(
@@ -55,6 +57,13 @@ class ServicesMutations:
                     code=404,
                 )
             service.enable()
+        except Exception as e:
+            return ServiceMutationReturn(
+                success=False,
+                message=format_error(e),
+                code=400,
+            )
         return ServiceMutationReturn(
             success=True,
             message="Service enabled.",
@@ -65,6 +74,7 @@ class ServicesMutations:
     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def disable_service(self, service_id: str) -> ServiceMutationReturn:
         """Disable service."""
+        try:
             service = get_service_by_id(service_id)
             if service is None:
                 return ServiceMutationReturn(
@@ -73,6 +83,12 @@ class ServicesMutations:
                     code=404,
                 )
             service.disable()
+        except Exception as e:
+            return ServiceMutationReturn(
+                success=False,
+                message=format_error(e),
+                code=400,
+            )
         return ServiceMutationReturn(
             success=True,
             message="Service disabled.",
@@ -144,6 +160,8 @@ class ServicesMutations:
                 message="Service not found.",
                 code=404,
             )
+        # TODO: make serviceImmovable and BlockdeviceNotFound exceptions
+        # in the move_to_volume() function and handle them here
         if not service.is_movable():
             return ServiceJobMutationReturn(
                 success=False,
@@ -160,6 +178,15 @@ class ServicesMutations:
                 service=service_to_graphql_service(service),
             )
         job = service.move_to_volume(volume)
+        if job.status in [JobStatus.CREATED, JobStatus.RUNNING]:
+            return ServiceJobMutationReturn(
+                success=True,
+                message="Started moving the service.",
+                code=200,
+                service=service_to_graphql_service(service),
+                job=job_to_api_job(job),
+            )
+        elif job.status == JobStatus.FINISHED:
             return ServiceJobMutationReturn(
                 success=True,
                 message="Service moved.",
@@ -167,3 +194,15 @@ class ServicesMutations:
                 service=service_to_graphql_service(service),
                 job=job_to_api_job(job),
             )
+        else:
+            return ServiceJobMutationReturn(
+                success=False,
+                message=f"Service move failure: {job.status_text}",
+                code=400,
+                service=service_to_graphql_service(service),
+                job=job_to_api_job(job),
+            )
+
+
+def format_error(e: Exception) -> str:
+    return type(e).__name__ + ": " + str(e)

View file

@@ -9,6 +9,7 @@ from selfprivacy_api.graphql.mutations.mutation_interface import (
 )

 import selfprivacy_api.actions.system as system_actions
+import selfprivacy_api.actions.ssh as ssh_actions


 @strawberry.type
@@ -26,6 +27,22 @@ class AutoUpgradeSettingsMutationReturn(MutationReturnInterface):
     allowReboot: bool


+@strawberry.type
+class SSHSettingsMutationReturn(MutationReturnInterface):
+    """A return type for after changing SSH settings"""
+
+    enable: bool
+    password_authentication: bool
+
+
+@strawberry.input
+class SSHSettingsInput:
+    """Input type for SSH settings"""
+
+    enable: bool
+    password_authentication: bool
+
+
 @strawberry.input
 class AutoUpgradeSettingsInput:
     """Input type for auto upgrade settings"""
@@ -76,41 +93,89 @@ class SystemMutations:
             allowReboot=new_settings.allowReboot,
         )

+    @strawberry.mutation(permission_classes=[IsAuthenticated])
+    def change_ssh_settings(
+        self, settings: SSHSettingsInput
+    ) -> SSHSettingsMutationReturn:
+        """Change ssh settings of the server."""
+        ssh_actions.set_ssh_settings(
+            enable=settings.enable,
+            password_authentication=settings.password_authentication,
+        )
+
+        new_settings = ssh_actions.get_ssh_settings()
+
+        return SSHSettingsMutationReturn(
+            success=True,
+            message="SSH settings changed",
+            code=200,
+            enable=new_settings.enable,
+            password_authentication=new_settings.passwordAuthentication,
+        )
+
     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def run_system_rebuild(self) -> GenericMutationReturn:
+        try:
             system_actions.rebuild_system()
             return GenericMutationReturn(
                 success=True,
                 message="Starting rebuild system",
                 code=200,
             )
+        except system_actions.ShellException as e:
+            return GenericMutationReturn(
+                success=False,
+                message=str(e),
+                code=500,
+            )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def run_system_rollback(self) -> GenericMutationReturn:
         system_actions.rollback_system()
+        try:
             return GenericMutationReturn(
                 success=True,
                 message="Starting rebuild system",
                 code=200,
             )
+        except system_actions.ShellException as e:
+            return GenericMutationReturn(
+                success=False,
+                message=str(e),
+                code=500,
+            )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def run_system_upgrade(self) -> GenericMutationReturn:
         system_actions.upgrade_system()
+        try:
             return GenericMutationReturn(
                 success=True,
                 message="Starting rebuild system",
                 code=200,
             )
+        except system_actions.ShellException as e:
+            return GenericMutationReturn(
+                success=False,
+                message=str(e),
+                code=500,
+            )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def reboot_system(self) -> GenericMutationReturn:
         system_actions.reboot_system()
+        try:
             return GenericMutationReturn(
                 success=True,
                 message="System reboot has started",
                 code=200,
             )
+        except system_actions.ShellException as e:
+            return GenericMutationReturn(
+                success=False,
+                message=str(e),
+                code=500,
+            )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def pull_repository_changes(self) -> GenericMutationReturn:

View file

@@ -69,6 +69,12 @@ class UsersMutations:
                 message=str(e),
                 code=400,
             )
+        except users_actions.InvalidConfiguration as e:
+            return UserMutationReturn(
+                success=False,
+                message=str(e),
+                code=400,
+            )
         except users_actions.UserAlreadyExists as e:
             return UserMutationReturn(
                 success=False,

View file

@@ -19,6 +19,7 @@ from selfprivacy_api.migrations.migrate_to_selfprivacy_channel import (
 )
 from selfprivacy_api.migrations.mount_volume import MountVolume
 from selfprivacy_api.migrations.providers import CreateProviderFields
+from selfprivacy_api.migrations.modules_in_json import CreateModulesField
 from selfprivacy_api.migrations.prepare_for_nixos_2211 import (
     MigrateToSelfprivacyChannelFrom2205,
 )
@@ -37,6 +38,7 @@ migrations = [
     MigrateToSelfprivacyChannelFrom2205(),
     MigrateToSelfprivacyChannelFrom2211(),
     LoadTokensToRedis(),
+    CreateModulesField(),
 ]

View file

@@ -0,0 +1,50 @@
from selfprivacy_api.migrations.migration import Migration
from selfprivacy_api.utils import ReadUserData, WriteUserData
from selfprivacy_api.services import get_all_services


def migrate_services_to_modules():
    with WriteUserData() as userdata:
        if "modules" not in userdata.keys():
            userdata["modules"] = {}
        for service in get_all_services():
            name = service.get_id()
            if name in userdata.keys():
                field_content = userdata[name]
                userdata["modules"][name] = field_content
                del userdata[name]


# If you ever want to get rid of modules field you will need to get rid of this migration
class CreateModulesField(Migration):
    """introduce 'modules' (services) into userdata"""

    def get_migration_name(self):
        return "modules_in_json"

    def get_migration_description(self):
        return "Group service settings into a 'modules' field in userdata.json"

    def is_migration_needed(self) -> bool:
        try:
            with ReadUserData() as userdata:
                for service in get_all_services():
                    if service.get_id() in userdata.keys():
                        return True
                if "modules" not in userdata.keys():
                    return True
                return False
        except Exception as e:
            print(e)
            return False

    def migrate(self):
        # Write info about providers to userdata.json
        try:
            migrate_services_to_modules()
            print("Done")
        except Exception as e:
            print(e)
            print("Error migrating service fields")

View file

@@ -1,125 +0,0 @@
from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from selfprivacy_api.actions.api_tokens import (
CannotDeleteCallerException,
InvalidExpirationDate,
InvalidUsesLeft,
NotFoundException,
delete_api_token,
refresh_api_token,
get_api_recovery_token_status,
get_api_tokens_with_caller_flag,
get_new_api_recovery_key,
use_mnemonic_recovery_token,
delete_new_device_auth_token,
get_new_device_auth_token,
use_new_device_auth_token,
)
from selfprivacy_api.dependencies import TokenHeader, get_token_header
router = APIRouter(
prefix="/auth",
tags=["auth"],
responses={404: {"description": "Not found"}},
)
@router.get("/tokens")
async def rest_get_tokens(auth_token: TokenHeader = Depends(get_token_header)):
"""Get the tokens info"""
return get_api_tokens_with_caller_flag(auth_token.token)
class DeleteTokenInput(BaseModel):
"""Delete token input"""
token_name: str
@router.delete("/tokens")
async def rest_delete_tokens(
token: DeleteTokenInput, auth_token: TokenHeader = Depends(get_token_header)
):
"""Delete the tokens"""
try:
delete_api_token(auth_token.token, token.token_name)
except NotFoundException:
raise HTTPException(status_code=404, detail="Token not found")
except CannotDeleteCallerException:
raise HTTPException(status_code=400, detail="Cannot delete caller's token")
return {"message": "Token deleted"}
@router.post("/tokens")
async def rest_refresh_token(auth_token: TokenHeader = Depends(get_token_header)):
"""Refresh the token"""
try:
new_token = refresh_api_token(auth_token.token)
except NotFoundException:
raise HTTPException(status_code=404, detail="Token not found")
return {"token": new_token}
@router.get("/recovery_token")
async def rest_get_recovery_token_status(
auth_token: TokenHeader = Depends(get_token_header),
):
return get_api_recovery_token_status()
class CreateRecoveryTokenInput(BaseModel):
expiration: Optional[datetime] = None
uses: Optional[int] = None
@router.post("/recovery_token")
async def rest_create_recovery_token(
limits: CreateRecoveryTokenInput = CreateRecoveryTokenInput(),
auth_token: TokenHeader = Depends(get_token_header),
):
try:
token = get_new_api_recovery_key(limits.expiration, limits.uses)
except InvalidExpirationDate as e:
raise HTTPException(status_code=400, detail=str(e))
except InvalidUsesLeft as e:
raise HTTPException(status_code=400, detail=str(e))
return {"token": token}
class UseTokenInput(BaseModel):
token: str
device: str
@router.post("/recovery_token/use")
async def rest_use_recovery_token(input: UseTokenInput):
token = use_mnemonic_recovery_token(input.token, input.device)
if token is None:
raise HTTPException(status_code=404, detail="Token not found")
return {"token": token}
@router.post("/new_device")
async def rest_new_device(auth_token: TokenHeader = Depends(get_token_header)):
token = get_new_device_auth_token()
return {"token": token}
@router.delete("/new_device")
async def rest_delete_new_device_token(
auth_token: TokenHeader = Depends(get_token_header),
):
delete_new_device_auth_token()
return {"token": None}
@router.post("/new_device/authorize")
async def rest_new_device_authorize(input: UseTokenInput):
token = use_new_device_auth_token(input.token, input.device)
if token is None:
raise HTTPException(status_code=404, detail="Token not found")
return {"message": "Device authorized", "token": token}

View file

@@ -1,336 +0,0 @@
"""Basic services legacy api"""
import base64
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from selfprivacy_api.actions.ssh import (
InvalidPublicKey,
KeyAlreadyExists,
KeyNotFound,
create_ssh_key,
enable_ssh,
get_ssh_settings,
remove_ssh_key,
set_ssh_settings,
)
from selfprivacy_api.actions.users import UserNotFound, get_user_by_username
from selfprivacy_api.dependencies import get_token_header
from selfprivacy_api.services.bitwarden import Bitwarden
from selfprivacy_api.services.gitea import Gitea
from selfprivacy_api.services.mailserver import MailServer
from selfprivacy_api.services.nextcloud import Nextcloud
from selfprivacy_api.services.ocserv import Ocserv
from selfprivacy_api.services.pleroma import Pleroma
from selfprivacy_api.services.service import ServiceStatus
from selfprivacy_api.utils import get_dkim_key, get_domain
router = APIRouter(
prefix="/services",
tags=["services"],
dependencies=[Depends(get_token_header)],
responses={404: {"description": "Not found"}},
)
def service_status_to_return_code(status: ServiceStatus):
"""Converts service status object to return code for
compatibility with legacy api"""
if status == ServiceStatus.ACTIVE:
return 0
elif status == ServiceStatus.FAILED:
return 1
elif status == ServiceStatus.INACTIVE:
return 3
elif status == ServiceStatus.OFF:
return 4
else:
return 2
@router.get("/status")
async def get_status():
"""Get the status of the services"""
mail_status = MailServer.get_status()
bitwarden_status = Bitwarden.get_status()
gitea_status = Gitea.get_status()
nextcloud_status = Nextcloud.get_status()
ocserv_stauts = Ocserv.get_status()
pleroma_status = Pleroma.get_status()
return {
"imap": service_status_to_return_code(mail_status),
"smtp": service_status_to_return_code(mail_status),
"http": 0,
"bitwarden": service_status_to_return_code(bitwarden_status),
"gitea": service_status_to_return_code(gitea_status),
"nextcloud": service_status_to_return_code(nextcloud_status),
"ocserv": service_status_to_return_code(ocserv_stauts),
"pleroma": service_status_to_return_code(pleroma_status),
}
@router.post("/bitwarden/enable")
async def enable_bitwarden():
"""Enable Bitwarden"""
Bitwarden.enable()
return {
"status": 0,
"message": "Bitwarden enabled",
}
@router.post("/bitwarden/disable")
async def disable_bitwarden():
"""Disable Bitwarden"""
Bitwarden.disable()
return {
"status": 0,
"message": "Bitwarden disabled",
}
@router.post("/gitea/enable")
async def enable_gitea():
"""Enable Gitea"""
Gitea.enable()
return {
"status": 0,
"message": "Gitea enabled",
}
@router.post("/gitea/disable")
async def disable_gitea():
"""Disable Gitea"""
Gitea.disable()
return {
"status": 0,
"message": "Gitea disabled",
}
@router.get("/mailserver/dkim")
async def get_mailserver_dkim():
"""Get the DKIM record for the mailserver"""
domain = get_domain()
dkim = get_dkim_key(domain, parse=False)
if dkim is None:
raise HTTPException(status_code=404, detail="DKIM record not found")
dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8")
return dkim
@router.post("/nextcloud/enable")
async def enable_nextcloud():
"""Enable Nextcloud"""
Nextcloud.enable()
return {
"status": 0,
"message": "Nextcloud enabled",
}
@router.post("/nextcloud/disable")
async def disable_nextcloud():
"""Disable Nextcloud"""
Nextcloud.disable()
return {
"status": 0,
"message": "Nextcloud disabled",
}
@router.post("/ocserv/enable")
async def enable_ocserv():
"""Enable Ocserv"""
Ocserv.enable()
return {
"status": 0,
"message": "Ocserv enabled",
}
@router.post("/ocserv/disable")
async def disable_ocserv():
"""Disable Ocserv"""
Ocserv.disable()
return {
"status": 0,
"message": "Ocserv disabled",
}
@router.post("/pleroma/enable")
async def enable_pleroma():
"""Enable Pleroma"""
Pleroma.enable()
return {
"status": 0,
"message": "Pleroma enabled",
}
@router.post("/pleroma/disable")
async def disable_pleroma():
"""Disable Pleroma"""
Pleroma.disable()
return {
"status": 0,
"message": "Pleroma disabled",
}
@router.get("/restic/backup/list")
async def get_restic_backup_list():
raise HTTPException(
status_code=410,
detail="This endpoint is deprecated, please use GraphQL API",
)
@router.put("/restic/backup/create")
async def create_restic_backup():
raise HTTPException(
status_code=410,
detail="This endpoint is deprecated, please use GraphQL API",
)
@router.get("/restic/backup/status")
async def get_restic_backup_status():
raise HTTPException(
status_code=410,
detail="This endpoint is deprecated, please use GraphQL API",
)
@router.get("/restic/backup/reload")
async def reload_restic_backup():
raise HTTPException(
status_code=410,
detail="This endpoint is deprecated, please use GraphQL API",
)
class BackupRestoreInput(BaseModel):
backupId: str
@router.put("/restic/backup/restore")
async def restore_restic_backup(backup: BackupRestoreInput):
raise HTTPException(
status_code=410,
detail="This endpoint is deprecated, please use GraphQL API",
)
class BackupConfigInput(BaseModel):
accountId: str
accountKey: str
bucket: str
@router.put("/restic/backblaze/config")
async def set_backblaze_config(backup_config: BackupConfigInput):
raise HTTPException(
status_code=410,
detail="This endpoint is deprecated, please use GraphQL API",
)
@router.post("/ssh/enable")
async def rest_enable_ssh():
"""Enable SSH"""
enable_ssh()
return {
"status": 0,
"message": "SSH enabled",
}
@router.get("/ssh")
async def rest_get_ssh():
"""Get the SSH configuration"""
settings = get_ssh_settings()
return {
"enable": settings.enable,
"passwordAuthentication": settings.passwordAuthentication,
}
class SshConfigInput(BaseModel):
enable: Optional[bool] = None
passwordAuthentication: Optional[bool] = None
@router.put("/ssh")
async def rest_set_ssh(ssh_config: SshConfigInput):
"""Set the SSH configuration"""
set_ssh_settings(ssh_config.enable, ssh_config.passwordAuthentication)
return "SSH settings changed"
class SshKeyInput(BaseModel):
public_key: str
@router.put("/ssh/key/send", status_code=201)
async def rest_send_ssh_key(input: SshKeyInput):
"""Send the SSH key"""
try:
create_ssh_key("root", input.public_key)
except KeyAlreadyExists as error:
raise HTTPException(status_code=409, detail="Key already exists") from error
except InvalidPublicKey as error:
raise HTTPException(
status_code=400,
detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported",
) from error
return {
"status": 0,
"message": "SSH key sent",
}
@router.get("/ssh/keys/{username}")
async def rest_get_ssh_keys(username: str):
"""Get the SSH keys for a user"""
user = get_user_by_username(username)
if user is None:
raise HTTPException(status_code=404, detail="User not found")
return user.ssh_keys
@router.post("/ssh/keys/{username}", status_code=201)
async def rest_add_ssh_key(username: str, input: SshKeyInput):
try:
create_ssh_key(username, input.public_key)
except KeyAlreadyExists as error:
raise HTTPException(status_code=409, detail="Key already exists") from error
except InvalidPublicKey as error:
raise HTTPException(
status_code=400,
detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported",
) from error
except UserNotFound as error:
raise HTTPException(status_code=404, detail="User not found") from error
return {
"message": "New SSH key successfully written",
}
@router.delete("/ssh/keys/{username}")
async def rest_delete_ssh_key(username: str, input: SshKeyInput):
try:
remove_ssh_key(username, input.public_key)
except KeyNotFound as error:
raise HTTPException(status_code=404, detail="Key not found") from error
except UserNotFound as error:
raise HTTPException(status_code=404, detail="User not found") from error
return {"message": "SSH key deleted"}

View file

@@ -1,105 +0,0 @@
from typing import Optional
from fastapi import APIRouter, Body, Depends, HTTPException
from pydantic import BaseModel
from selfprivacy_api.dependencies import get_token_header
import selfprivacy_api.actions.system as system_actions
router = APIRouter(
prefix="/system",
tags=["system"],
dependencies=[Depends(get_token_header)],
responses={404: {"description": "Not found"}},
)
@router.get("/configuration/timezone")
async def get_timezone():
"""Get the timezone of the server"""
return system_actions.get_timezone()
class ChangeTimezoneRequestBody(BaseModel):
"""Change the timezone of the server"""
timezone: str
@router.put("/configuration/timezone")
async def change_timezone(timezone: ChangeTimezoneRequestBody):
"""Change the timezone of the server"""
try:
system_actions.change_timezone(timezone.timezone)
except system_actions.InvalidTimezone as e:
raise HTTPException(status_code=400, detail=str(e))
return {"timezone": timezone.timezone}
@router.get("/configuration/autoUpgrade")
async def get_auto_upgrade_settings():
"""Get the auto-upgrade settings"""
return system_actions.get_auto_upgrade_settings().dict()
class AutoUpgradeSettings(BaseModel):
"""Settings for auto-upgrading user data"""
enable: Optional[bool] = None
allowReboot: Optional[bool] = None
@router.put("/configuration/autoUpgrade")
async def set_auto_upgrade_settings(settings: AutoUpgradeSettings):
"""Set the auto-upgrade settings"""
system_actions.set_auto_upgrade_settings(settings.enable, settings.allowReboot)
return "Auto-upgrade settings changed"
@router.get("/configuration/apply")
async def apply_configuration():
"""Apply the configuration"""
return_code = system_actions.rebuild_system()
return return_code
@router.get("/configuration/rollback")
async def rollback_configuration():
"""Rollback the configuration"""
return_code = system_actions.rollback_system()
return return_code
@router.get("/configuration/upgrade")
async def upgrade_configuration():
"""Upgrade the configuration"""
return_code = system_actions.upgrade_system()
return return_code
@router.get("/reboot")
async def reboot_system():
"""Reboot the system"""
system_actions.reboot_system()
return "System reboot has started"
@router.get("/version")
async def get_system_version():
"""Get the system version"""
return {"system_version": system_actions.get_system_version()}
@router.get("/pythonVersion")
async def get_python_version():
"""Get the Python version"""
return system_actions.get_python_version()
@router.get("/configuration/pull")
async def pull_configuration():
"""Pull the configuration"""
action_result = system_actions.pull_repository_changes()
if action_result.status == 0:
return action_result.dict()
raise HTTPException(status_code=500, detail=action_result.dict())

View file

@@ -1,62 +0,0 @@
"""Users management module"""
from typing import Optional
from fastapi import APIRouter, Body, Depends, HTTPException
from pydantic import BaseModel
import selfprivacy_api.actions.users as users_actions
from selfprivacy_api.dependencies import get_token_header
router = APIRouter(
prefix="/users",
tags=["users"],
dependencies=[Depends(get_token_header)],
responses={404: {"description": "Not found"}},
)
@router.get("")
async def get_users(withMainUser: bool = False):
"""Get the list of users"""
users: list[users_actions.UserDataUser] = users_actions.get_users(
exclude_primary=not withMainUser, exclude_root=True
)
return [user.username for user in users]
class UserInput(BaseModel):
"""User input"""
username: str
password: str
@router.post("", status_code=201)
async def create_user(user: UserInput):
try:
users_actions.create_user(user.username, user.password)
except users_actions.PasswordIsEmpty as e:
raise HTTPException(status_code=400, detail=str(e))
except users_actions.UsernameForbidden as e:
raise HTTPException(status_code=409, detail=str(e))
except users_actions.UsernameNotAlphanumeric as e:
raise HTTPException(status_code=400, detail=str(e))
except users_actions.UsernameTooLong as e:
raise HTTPException(status_code=400, detail=str(e))
except users_actions.UserAlreadyExists as e:
raise HTTPException(status_code=409, detail=str(e))
return {"result": 0, "username": user.username}
@router.delete("/{username}")
async def delete_user(username: str):
try:
users_actions.delete_user(username)
except users_actions.UserNotFound as e:
raise HTTPException(status_code=404, detail=str(e))
except users_actions.UserIsProtected as e:
raise HTTPException(status_code=400, detail=str(e))
return {"result": 0, "username": username}

View file

@@ -58,11 +58,6 @@ class Bitwarden(Service):
     def get_backup_description() -> str:
         return "Password database, encryption certificate and attachments."

-    @staticmethod
-    def is_enabled() -> bool:
-        with ReadUserData() as user_data:
-            return user_data.get("bitwarden", {}).get("enable", False)
-
     @staticmethod
     def get_status() -> ServiceStatus:
         """
@@ -76,22 +71,6 @@ class Bitwarden(Service):
         """
         return get_service_status("vaultwarden.service")

-    @staticmethod
-    def enable():
-        """Enable Bitwarden service."""
-        with WriteUserData() as user_data:
-            if "bitwarden" not in user_data:
-                user_data["bitwarden"] = {}
-            user_data["bitwarden"]["enable"] = True
-
-    @staticmethod
-    def disable():
-        """Disable Bitwarden service."""
-        with WriteUserData() as user_data:
-            if "bitwarden" not in user_data:
-                user_data["bitwarden"] = {}
-            user_data["bitwarden"]["enable"] = False
-
     @staticmethod
     def stop():
         subprocess.run(["systemctl", "stop", "vaultwarden.service"])

View file

@@ -54,11 +54,6 @@ class Gitea(Service):
     def get_backup_description() -> str:
         return "Git repositories, database and user data."

-    @staticmethod
-    def is_enabled() -> bool:
-        with ReadUserData() as user_data:
-            return user_data.get("gitea", {}).get("enable", False)
-
     @staticmethod
     def get_status() -> ServiceStatus:
         """
@@ -71,22 +66,6 @@ class Gitea(Service):
         """
         return get_service_status("gitea.service")

-    @staticmethod
-    def enable():
-        """Enable Gitea service."""
-        with WriteUserData() as user_data:
-            if "gitea" not in user_data:
-                user_data["gitea"] = {}
-            user_data["gitea"]["enable"] = True
-
-    @staticmethod
-    def disable():
-        """Disable Gitea service."""
-        with WriteUserData() as user_data:
-            if "gitea" not in user_data:
-                user_data["gitea"] = {}
-            user_data["gitea"]["enable"] = False
-
     @staticmethod
     def stop():
         subprocess.run(["systemctl", "stop", "gitea.service"])

View file

@@ -55,33 +55,12 @@ class Jitsi(Service):
     def get_backup_description() -> str:
         return "Secrets that are used to encrypt the communication."

-    @staticmethod
-    def is_enabled() -> bool:
-        with ReadUserData() as user_data:
-            return user_data.get("jitsi", {}).get("enable", False)
-
     @staticmethod
     def get_status() -> ServiceStatus:
         return get_service_status_from_several_units(
             ["jitsi-videobridge.service", "jicofo.service"]
         )

-    @staticmethod
-    def enable():
-        """Enable Jitsi service."""
-        with WriteUserData() as user_data:
-            if "jitsi" not in user_data:
-                user_data["jitsi"] = {}
-            user_data["jitsi"]["enable"] = True
-
-    @staticmethod
-    def disable():
-        """Disable Gitea service."""
-        with WriteUserData() as user_data:
-            if "jitsi" not in user_data:
-                user_data["jitsi"] = {}
-            user_data["jitsi"]["enable"] = False
-
     @staticmethod
     def stop():
         subprocess.run(

View file

@@ -53,11 +53,6 @@ class Nextcloud(Service):
     def get_backup_description() -> str:
         return "All the files and other data stored in Nextcloud."

-    @staticmethod
-    def is_enabled() -> bool:
-        with ReadUserData() as user_data:
-            return user_data.get("nextcloud", {}).get("enable", False)
-
     @staticmethod
     def get_status() -> ServiceStatus:
         """
@@ -71,22 +66,6 @@ class Nextcloud(Service):
         """
         return get_service_status("phpfpm-nextcloud.service")

-    @staticmethod
-    def enable():
-        """Enable Nextcloud service."""
-        with WriteUserData() as user_data:
-            if "nextcloud" not in user_data:
-                user_data["nextcloud"] = {}
-            user_data["nextcloud"]["enable"] = True
-
-    @staticmethod
-    def disable():
-        """Disable Nextcloud service."""
-        with WriteUserData() as user_data:
-            if "nextcloud" not in user_data:
-                user_data["nextcloud"] = {}
-            user_data["nextcloud"]["enable"] = False
-
     @staticmethod
     def stop():
         """Stop Nextcloud service."""

View file

@@ -51,29 +51,10 @@ class Ocserv(Service):
     def get_backup_description() -> str:
         return "Nothing to backup."

-    @staticmethod
-    def is_enabled() -> bool:
-        with ReadUserData() as user_data:
-            return user_data.get("ocserv", {}).get("enable", False)
-
     @staticmethod
     def get_status() -> ServiceStatus:
         return get_service_status("ocserv.service")

-    @staticmethod
-    def enable():
-        with WriteUserData() as user_data:
-            if "ocserv" not in user_data:
-                user_data["ocserv"] = {}
-            user_data["ocserv"]["enable"] = True
-
-    @staticmethod
-    def disable():
-        with WriteUserData() as user_data:
-            if "ocserv" not in user_data:
-                user_data["ocserv"] = {}
-            user_data["ocserv"]["enable"] = False
-
     @staticmethod
     def stop():
         subprocess.run(["systemctl", "stop", "ocserv.service"], check=False)

View file

@@ -50,29 +50,10 @@ class Pleroma(Service):
     def get_backup_description() -> str:
         return "Your Pleroma accounts, posts and media."

-    @staticmethod
-    def is_enabled() -> bool:
-        with ReadUserData() as user_data:
-            return user_data.get("pleroma", {}).get("enable", False)
-
     @staticmethod
     def get_status() -> ServiceStatus:
         return get_service_status("pleroma.service")

-    @staticmethod
-    def enable():
-        with WriteUserData() as user_data:
-            if "pleroma" not in user_data:
-                user_data["pleroma"] = {}
-            user_data["pleroma"]["enable"] = True
-
-    @staticmethod
-    def disable():
-        with WriteUserData() as user_data:
-            if "pleroma" not in user_data:
-                user_data["pleroma"] = {}
-            user_data["pleroma"]["enable"] = False
-
     @staticmethod
     def stop():
         subprocess.run(["systemctl", "stop", "pleroma.service"])

View file

@@ -12,6 +12,7 @@ from selfprivacy_api.services.generic_size_counter import get_storage_usage
 from selfprivacy_api.services.owned_path import OwnedPath
 from selfprivacy_api import utils
 from selfprivacy_api.utils.waitloop import wait_until_true
+from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain

 DEFAULT_START_STOP_TIMEOUT = 5 * 60
@@ -125,11 +126,17 @@ class Service(ABC):
         """
         pass

-    @staticmethod
-    @abstractmethod
-    def is_enabled() -> bool:
-        """`True` if the service is enabled."""
-        pass
+    @classmethod
+    def is_enabled(cls) -> bool:
+        """
+        `True` if the service is enabled.
+        `False` if it is not enabled or not defined in file
+        If there is nothing in the file, this is equivalent to False
+        because NixOS won't enable it then.
+        """
+        name = cls.get_id()
+        with ReadUserData() as user_data:
+            return user_data.get("modules", {}).get(name, {}).get("enable", False)

     @staticmethod
     @abstractmethod
@@ -137,17 +144,25 @@ class Service(ABC):
         """The status of the service, reported by systemd."""
         pass

-    @staticmethod
-    @abstractmethod
-    def enable():
-        """Enable the service. Usually this means enabling systemd unit."""
-        pass
+    @classmethod
+    def _set_enable(cls, enable: bool):
+        name = cls.get_id()
+        with WriteUserData() as user_data:
+            if "modules" not in user_data:
+                user_data["modules"] = {}
+            if name not in user_data["modules"]:
+                user_data["modules"][name] = {}
+            user_data["modules"][name]["enable"] = enable

-    @staticmethod
-    @abstractmethod
-    def disable():
+    @classmethod
+    def enable(cls):
+        """Enable the service. Usually this means enabling systemd unit."""
+        cls._set_enable(True)
+
+    @classmethod
+    def disable(cls):
         """Disable the service. Usually this means disabling systemd unit."""
-        pass
+        cls._set_enable(False)

     @staticmethod
     @abstractmethod
@@ -247,6 +262,8 @@ class Service(ABC):
     @abstractmethod
     def move_to_volume(self, volume: BlockDevice) -> Job:
+        """Cannot raise errors.
+        Returns errors as an errored out Job instead."""
         pass

     @classmethod
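With enable(), disable() and is_enabled() now implemented once on the base class against the "modules" section of userdata.json, concrete services no longer need their own copies (the per-service deletions above follow from this). A rough sketch of the inherited behaviour, assuming a writable userdata.json; the calls are illustrative, not taken from the commit:

from selfprivacy_api.services.bitwarden import Bitwarden

Bitwarden.enable()               # writes userdata["modules"]["bitwarden"]["enable"] = True
assert Bitwarden.is_enabled()    # reads the same path, defaulting to False if absent

Bitwarden.disable()              # flips the flag back to False
assert not Bitwarden.is_enabled()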

View file

@@ -8,9 +8,10 @@ from os import path

 # from enum import Enum

-from selfprivacy_api.jobs import Job
+from selfprivacy_api.jobs import Job, Jobs, JobStatus
 from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus
 from selfprivacy_api.utils.block_devices import BlockDevice
+from selfprivacy_api.services.generic_service_mover import move_service, FolderMoveNames

 import selfprivacy_api.utils.network as network_utils
 from selfprivacy_api.services.test_service.icon import BITWARDEN_ICON
@@ -22,16 +23,19 @@ class DummyService(Service):
     """A test service"""

     folders: List[str] = []
-    startstop_delay = 0
+    startstop_delay = 0.0
     backuppable = True
+    movable = True
+    # if False, we try to actually move
+    simulate_moving = True
+    drive = "sda1"

     def __init_subclass__(cls, folders: List[str]):
         cls.folders = folders

     def __init__(self):
         super().__init__()
-        status_file = self.status_file()
-        with open(status_file, "w") as file:
+        with open(self.status_file(), "w") as file:
             file.write(ServiceStatus.ACTIVE.value)

     @staticmethod
@@ -61,9 +65,9 @@ class DummyService(Service):
         domain = "test.com"
         return f"https://password.{domain}"

-    @staticmethod
-    def is_movable() -> bool:
-        return True
+    @classmethod
+    def is_movable(cls) -> bool:
+        return cls.movable

     @staticmethod
     def is_required() -> bool:
@@ -73,10 +77,6 @@ class DummyService(Service):
     def get_backup_description() -> str:
         return "How did we get here?"

-    @staticmethod
-    def is_enabled() -> bool:
-        return True
-
     @classmethod
     def status_file(cls) -> str:
         dir = cls.folders[0]
@@ -116,22 +116,30 @@ class DummyService(Service):
         we can only set it up dynamically for tests via a classmethod"""
         cls.backuppable = new_value

+    @classmethod
+    def set_movable(cls, new_value: bool) -> None:
+        """For tests: because is_movale is static,
+        we can only set it up dynamically for tests via a classmethod"""
+        cls.movable = new_value
+
     @classmethod
     def can_be_backed_up(cls) -> bool:
         """`True` if the service can be backed up."""
         return cls.backuppable

     @classmethod
-    def enable(cls):
-        pass
+    def set_delay(cls, new_delay_sec: float) -> None:
+        cls.startstop_delay = new_delay_sec

     @classmethod
-    def disable(cls, delay):
-        pass
+    def set_drive(cls, new_drive: str) -> None:
+        cls.drive = new_drive

     @classmethod
-    def set_delay(cls, new_delay):
-        cls.startstop_delay = new_delay
+    def set_simulated_moves(cls, enabled: bool) -> None:
+        """If True, this service will not actually call moving code
+        when moved"""
+        cls.simulate_moving = enabled

     @classmethod
     def stop(cls):
@@ -169,9 +177,9 @@ class DummyService(Service):
         storage_usage = 0
         return storage_usage

-    @staticmethod
-    def get_drive() -> str:
-        return "sda1"
+    @classmethod
+    def get_drive(cls) -> str:
+        return cls.drive

     @classmethod
     def get_folders(cls) -> List[str]:
@@ -198,4 +206,22 @@ class DummyService(Service):
         ]

     def move_to_volume(self, volume: BlockDevice) -> Job:
-        pass
+        job = Jobs.add(
+            type_id=f"services.{self.get_id()}.move",
+            name=f"Move {self.get_display_name()}",
+            description=f"Moving {self.get_display_name()} data to {volume.name}",
+        )
+
+        if self.simulate_moving is False:
+            # completely generic code, TODO: make it the default impl.
+            move_service(
+                self,
+                volume,
+                job,
+                FolderMoveNames.default_foldermoves(self),
+                self.get_id(),
+            )
+        else:
+            Jobs.update(job, status=JobStatus.FINISHED)
+
+        self.set_drive(volume.name)
+        return job

View file

@@ -6,12 +6,14 @@ import json
 import os
 import subprocess
 import portalocker
+import typing

 USERDATA_FILE = "/etc/nixos/userdata/userdata.json"
 TOKENS_FILE = "/etc/nixos/userdata/tokens.json"
 JOBS_FILE = "/etc/nixos/userdata/jobs.json"
 DOMAIN_FILE = "/var/domain"
+DKIM_DIR = "/var/dkim/"


 class UserDataFiles(Enum):
@@ -166,26 +168,31 @@ def parse_date(date_str: str) -> datetime.datetime:
     raise ValueError("Invalid date string")


-def get_dkim_key(domain, parse=True):
-    """Get DKIM key from /var/dkim/<domain>.selector.txt"""
-    if os.path.exists("/var/dkim/" + domain + ".selector.txt"):
-        cat_process = subprocess.Popen(
-            ["cat", "/var/dkim/" + domain + ".selector.txt"], stdout=subprocess.PIPE
-        )
-        dkim = cat_process.communicate()[0]
-        if parse:
-            # Extract key from file
-            dkim = dkim.split(b"(")[1]
-            dkim = dkim.split(b")")[0]
-            # Replace all quotes with nothing
-            dkim = dkim.replace(b'"', b"")
-            # Trim whitespace, remove newlines and tabs
-            dkim = dkim.strip()
-            dkim = dkim.replace(b"\n", b"")
-            dkim = dkim.replace(b"\t", b"")
-            # Remove all redundant spaces
-            dkim = b" ".join(dkim.split())
-        return str(dkim, "utf-8")
+def parse_dkim(dkim: str) -> str:
+    # extract key from file
+    dkim = dkim.split("(")[1]
+    dkim = dkim.split(")")[0]
+    # replace all quotes with nothing
+    dkim = dkim.replace('"', "")
+    # trim whitespace, remove newlines and tabs
+    dkim = dkim.strip()
+    dkim = dkim.replace("\n", "")
+    dkim = dkim.replace("\t", "")
+    # remove all redundant spaces
+    dkim = " ".join(dkim.split())
+    return dkim
+
+
+def get_dkim_key(domain: str, parse: bool = True) -> typing.Optional[str]:
+    """Get DKIM key from /var/dkim/<domain>.selector.txt"""
+    dkim_path = os.path.join(DKIM_DIR, domain + ".selector.txt")
+    if os.path.exists(dkim_path):
+        with open(dkim_path, encoding="utf-8") as dkim_file:
+            dkim = dkim_file.read()
+        if parse:
+            dkim = parse_dkim(dkim)
+        return dkim
     return None
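As a quick illustration of the new parse_dkim() helper, here is what it produces for a typical BIND-style selector file; the record content below is made up for the example:

from selfprivacy_api.utils import parse_dkim

raw = 'selector._domainkey IN TXT ( "v=DKIM1; k=rsa; "\n\t"p=MIGfMA0GCSqGSIb3" ) ;'
print(parse_dkim(raw))  # -> v=DKIM1; k=rsa; p=MIGfMA0GCSqGSIb3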

View file

@@ -1,4 +1,5 @@
-"""Wrapper for block device functions."""
+"""A block device API wrapping lsblk"""
+from __future__ import annotations
 import subprocess
 import json
 import typing
@@ -11,6 +12,7 @@ def get_block_device(device_name):
     """
     Return a block device by name.
     """
+    # TODO: remove the function and related tests: dublicated by singleton
     lsblk_output = subprocess.check_output(
         [
             "lsblk",
@@ -43,22 +45,37 @@ class BlockDevice:
     A block device.
     """

-    def __init__(self, block_device):
-        self.name = block_device["name"]
-        self.path = block_device["path"]
-        self.fsavail = str(block_device["fsavail"])
-        self.fssize = str(block_device["fssize"])
-        self.fstype = block_device["fstype"]
-        self.fsused = str(block_device["fsused"])
-        self.mountpoints = block_device["mountpoints"]
-        self.label = block_device["label"]
-        self.uuid = block_device["uuid"]
-        self.size = str(block_device["size"])
-        self.model = block_device["model"]
-        self.serial = block_device["serial"]
-        self.type = block_device["type"]
+    def __init__(self, device_dict: dict):
+        self.update_from_dict(device_dict)
+
+    def update_from_dict(self, device_dict: dict):
+        self.name = device_dict["name"]
+        self.path = device_dict["path"]
+        self.fsavail = str(device_dict["fsavail"])
+        self.fssize = str(device_dict["fssize"])
+        self.fstype = device_dict["fstype"]
+        self.fsused = str(device_dict["fsused"])
+        self.mountpoints = device_dict["mountpoints"]
+        self.label = device_dict["label"]
+        self.uuid = device_dict["uuid"]
+        self.size = str(device_dict["size"])
+        self.model = device_dict["model"]
+        self.serial = device_dict["serial"]
+        self.type = device_dict["type"]
         self.locked = False
+        self.children: typing.List[BlockDevice] = []
+        if "children" in device_dict.keys():
+            for child in device_dict["children"]:
+                self.children.append(BlockDevice(child))
+
+    def all_children(self) -> typing.List[BlockDevice]:
+        result = []
+        for child in self.children:
+            result.extend(child.all_children())
+            result.append(child)
+        return result

     def __str__(self):
         return self.name
@@ -82,17 +99,7 @@ class BlockDevice:
         Update current data and return a dictionary of stats.
         """
         device = get_block_device(self.name)
-        self.fsavail = str(device["fsavail"])
-        self.fssize = str(device["fssize"])
-        self.fstype = device["fstype"]
-        self.fsused = str(device["fsused"])
-        self.mountpoints = device["mountpoints"]
-        self.label = device["label"]
-        self.uuid = device["uuid"]
-        self.size = str(device["size"])
-        self.model = device["model"]
-        self.serial = device["serial"]
-        self.type = device["type"]
+        self.update_from_dict(device)

         return {
             "name": self.name,
@@ -110,6 +117,14 @@ class BlockDevice:
             "type": self.type,
         }

+    def is_usable_partition(self):
+        # Ignore devices with type "rom"
+        if self.type == "rom":
+            return False
+        if self.fstype == "ext4":
+            return True
+        return False
+
     def resize(self):
         """
         Resize the block device.
@@ -165,41 +180,16 @@ class BlockDevices(metaclass=SingletonMetaclass):
         """
        Update the list of block devices.
         """
-        devices = []
-        lsblk_output = subprocess.check_output(
-            [
-                "lsblk",
-                "-J",
-                "-b",
-                "-o",
-                "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE",
-            ]
-        )
-        lsblk_output = lsblk_output.decode("utf-8")
-        lsblk_output = json.loads(lsblk_output)
-        for device in lsblk_output["blockdevices"]:
-            # Ignore devices with type "rom"
-            if device["type"] == "rom":
-                continue
-            # Ignore iso9660 devices
-            if device["fstype"] == "iso9660":
-                continue
-            if device["fstype"] is None:
-                if "children" in device:
-                    for child in device["children"]:
-                        if child["fstype"] == "ext4":
-                            device = child
-                            break
-            devices.append(device)
-        # Add new devices and delete non-existent devices
-        for device in devices:
-            if device["name"] not in [
-                block_device.name for block_device in self.block_devices
-            ]:
-                self.block_devices.append(BlockDevice(device))
-        for block_device in self.block_devices:
-            if block_device.name not in [device["name"] for device in devices]:
-                self.block_devices.remove(block_device)
+        devices = BlockDevices.lsblk_devices()
+
+        children = []
+        for device in devices:
+            children.extend(device.all_children())
+        devices.extend(children)
+
+        valid_devices = [device for device in devices if device.is_usable_partition()]
+
+        self.block_devices = valid_devices

     def get_block_device(self, name: str) -> typing.Optional[BlockDevice]:
         """
@@ -236,3 +226,25 @@ class BlockDevices(metaclass=SingletonMetaclass):
             if "/" in block_device.mountpoints:
                 return block_device
         raise RuntimeError("No root block device found")
+
+    @staticmethod
+    def lsblk_device_dicts() -> typing.List[dict]:
+        lsblk_output_bytes = subprocess.check_output(
+            [
+                "lsblk",
+                "-J",
+                "-b",
+                "-o",
+                "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE",
+            ]
+        )
+        lsblk_output = lsblk_output_bytes.decode("utf-8")
+        return json.loads(lsblk_output)["blockdevices"]
+
+    @staticmethod
+    def lsblk_devices() -> typing.List[BlockDevice]:
+        devices = []
+        for device in BlockDevices.lsblk_device_dicts():
+            devices.append(device)
+        return [BlockDevice(device) for device in devices]

View file

@@ -2,7 +2,7 @@ from setuptools import setup, find_packages

 setup(
     name="selfprivacy_api",
-    version="2.4.3",
+    version="3.0.0",
     packages=find_packages(),
     scripts=[
         "selfprivacy_api/app.py",

View file

@@ -67,6 +67,10 @@ def generate_backup_query(query_array):
     return "query TestBackup {\n backup {" + "\n".join(query_array) + "}\n}"


+def generate_service_query(query_array):
+    return "query TestService {\n services {" + "\n".join(query_array) + "}\n}"
+
+
 def mnemonic_to_hex(mnemonic):
     return Mnemonic(language="english").to_entropy(mnemonic).hex()

View file

@ -3,12 +3,19 @@
# pylint: disable=unused-argument # pylint: disable=unused-argument
import os import os
import pytest import pytest
from os import path
from fastapi.testclient import TestClient
import os.path as path
import datetime import datetime
from os import path
from os import makedirs
from typing import Generator
from fastapi.testclient import TestClient
from selfprivacy_api.utils.huey import huey
import selfprivacy_api.services as services
from selfprivacy_api.services import get_service_by_id, Service
from selfprivacy_api.services.test_service import DummyService
from selfprivacy_api.models.tokens.token import Token from selfprivacy_api.models.tokens.token import Token
from selfprivacy_api.repositories.tokens.json_tokens_repository import ( from selfprivacy_api.repositories.tokens.json_tokens_repository import (
JsonTokensRepository, JsonTokensRepository,
@ -19,6 +26,9 @@ from selfprivacy_api.repositories.tokens.redis_tokens_repository import (
from tests.common import read_json from tests.common import read_json
TESTFILE_BODY = "testytest!"
TESTFILE_2_BODY = "testissimo!"
EMPTY_TOKENS_JSON = ' {"tokens": []}' EMPTY_TOKENS_JSON = ' {"tokens": []}'
@ -147,3 +157,49 @@ def wrong_auth_client(tokens_file, huey_database, jobs_file):
client = TestClient(app) client = TestClient(app)
client.headers.update({"Authorization": "Bearer WRONG_TOKEN"}) client.headers.update({"Authorization": "Bearer WRONG_TOKEN"})
return client return client
@pytest.fixture()
def raw_dummy_service(tmpdir):
dirnames = ["test_service", "also_test_service"]
service_dirs = []
for d in dirnames:
service_dir = path.join(tmpdir, d)
makedirs(service_dir)
service_dirs.append(service_dir)
testfile_path_1 = path.join(service_dirs[0], "testfile.txt")
with open(testfile_path_1, "w") as file:
file.write(TESTFILE_BODY)
testfile_path_2 = path.join(service_dirs[1], "testfile2.txt")
with open(testfile_path_2, "w") as file:
file.write(TESTFILE_2_BODY)
# we need this to not change get_folders() much
class TestDummyService(DummyService, folders=service_dirs):
pass
service = TestDummyService()
# assert pickle.dumps(service) is not None
return service
@pytest.fixture()
def dummy_service(
tmpdir, raw_dummy_service, generic_userdata
) -> Generator[Service, None, None]:
service = raw_dummy_service
# register our service
services.services.append(service)
huey.immediate = True
assert huey.immediate is True
assert get_service_by_id(service.get_id()) is not None
service.enable()
yield service
# cleanup because apparently it matters wrt tasks
services.services.remove(service)
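A test that depends on this fixture can rely on the dummy service being registered and discoverable by id. A hypothetical example (the test name and assertion are illustrative only, not part of this changeset):

# Illustrative only: the fixture has already appended the service to the registry.
def test_dummy_service_registered(dummy_service):
    assert get_service_by_id(dummy_service.get_id()) is dummy_service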


@ -1,60 +1,55 @@
{ {
"api": { "api": {"token": "TEST_TOKEN", "enableSwagger": false},
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": true
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance", "hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS", "resticPassword": "PASS",
"ssh": { "ssh": {
"enable": true, "enable": true,
"passwordAuthentication": true, "passwordAuthentication": true,
"rootKeys": [ "rootKeys": ["ssh-ed25519 KEY test@pc"]
"ssh-ed25519 KEY test@pc"
]
}, },
"username": "tester", "username": "tester",
"gitea": { "autoUpgrade": {"enable": true, "allowReboot": true},
"enable": true "useBinds": true,
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"jitsi": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": ["ssh-rsa KEY test@pc"],
"ssh-rsa KEY test@pc" "dns": {"provider": "CLOUDFLARE", "apiKey": "TOKEN"},
], "server": {"provider": "HETZNER"},
"dns": { "modules": {
"provider": "CLOUDFLARE", "bitwarden": {"enable": true},
"apiKey": "TOKEN" "gitea": {"enable": true},
}, "ocserv": {"enable": true},
"server": { "pleroma": {"enable": true},
"provider": "HETZNER" "jitsi": {"enable": true},
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
}
}, },
"backup": { "backup": {
"provider": "BACKBLAZE", "provider": "BACKBLAZE",
"accountId": "ID", "accountId": "ID",
"accountKey": "KEY", "accountKey": "KEY",
"bucket": "selfprivacy" "bucket": "selfprivacy"
},
"users": [
{
"username": "user1",
"hashedPassword": "HASHED_PASSWORD_1",
"sshKeys": ["ssh-rsa KEY user1@pc"]
},
{
"username": "user2",
"hashedPassword": "HASHED_PASSWORD_2",
"sshKeys": ["ssh-rsa KEY user2@pc"]
},
{
"username": "user3",
"hashedPassword": "HASHED_PASSWORD_3",
"sshKeys": ["ssh-rsa KEY user3@pc"]
} }
]
} }
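The fixture data above moves per-service flags under a "modules" key and adds a "users" list next to the primary "username". A small sketch of what consuming code can now expect (the file path below is hypothetical; this diff does not name the fixture file):

import json

# Hypothetical path to the restructured fixture file.
with open("tests/data/turned_on.json") as userdata_file:
    user_data = json.load(userdata_file)

# Per-service flags now live under "modules" instead of the top level.
assert user_data["modules"]["bitwarden"]["enable"] is True
# Non-primary users are listed under "users".
assert [user["username"] for user in user_data["users"]] == ["user1", "user2", "user3"]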


@ -2,7 +2,6 @@ import pytest
import os import os
import os.path as path import os.path as path
from os import makedirs
from os import remove from os import remove
from os import listdir from os import listdir
from os import urandom from os import urandom
@ -13,7 +12,10 @@ import tempfile
from selfprivacy_api.utils.huey import huey from selfprivacy_api.utils.huey import huey
import selfprivacy_api.services as services import tempfile
from selfprivacy_api.utils.huey import huey
from selfprivacy_api.services import Service, get_all_services from selfprivacy_api.services import Service, get_all_services
from selfprivacy_api.services import get_service_by_id from selfprivacy_api.services import get_service_by_id
from selfprivacy_api.services.service import ServiceStatus from selfprivacy_api.services.service import ServiceStatus
@ -46,13 +48,13 @@ from selfprivacy_api.backup.tasks import (
from selfprivacy_api.backup.storage import Storage from selfprivacy_api.backup.storage import Storage
TESTFILE_BODY = "testytest!"
TESTFILE_2_BODY = "testissimo!"
REPO_NAME = "test_backup" REPO_NAME = "test_backup"
REPOFILE_NAME = "totallyunrelated"
def prepare_localfile_backups(temp_dir): def prepare_localfile_backups(temp_dir):
test_repo_path = path.join(temp_dir, "totallyunrelated") test_repo_path = path.join(temp_dir, REPOFILE_NAME)
assert not path.exists(test_repo_path) assert not path.exists(test_repo_path)
Backups.set_localfile_repo(test_repo_path) Backups.set_localfile_repo(test_repo_path)
@ -67,16 +69,24 @@ def backups_local(tmpdir):
@pytest.fixture(scope="function") @pytest.fixture(scope="function")
def backups(tmpdir): def backups(tmpdir):
# for those tests that are supposed to pass with any repo """
For those tests that are supposed to pass with
both local and cloud repos
"""
# Sometimes this is False; the reason is not yet understood.
huey.immediate = True
assert huey.immediate is True
Backups.reset() Backups.reset()
if BACKUP_PROVIDER_ENVS["kind"] in os.environ.keys(): if BACKUP_PROVIDER_ENVS["kind"] in os.environ.keys():
Backups.set_provider_from_envs() Backups.set_provider_from_envs()
else: else:
prepare_localfile_backups(tmpdir) prepare_localfile_backups(tmpdir)
Jobs.reset() Jobs.reset()
# assert not repo_path
Backups.init_repo() Backups.init_repo()
assert Backups.provider().location == str(tmpdir) + "/" + REPOFILE_NAME
yield yield
Backups.erase_repo() Backups.erase_repo()
@ -86,49 +96,6 @@ def backups_backblaze(generic_userdata):
Backups.reset(reset_json=False) Backups.reset(reset_json=False)
@pytest.fixture()
def raw_dummy_service(tmpdir):
dirnames = ["test_service", "also_test_service"]
service_dirs = []
for d in dirnames:
service_dir = path.join(tmpdir, d)
makedirs(service_dir)
service_dirs.append(service_dir)
testfile_path_1 = path.join(service_dirs[0], "testfile.txt")
with open(testfile_path_1, "w") as file:
file.write(TESTFILE_BODY)
testfile_path_2 = path.join(service_dirs[1], "testfile2.txt")
with open(testfile_path_2, "w") as file:
file.write(TESTFILE_2_BODY)
# we need this to not change get_folders() much
class TestDummyService(DummyService, folders=service_dirs):
pass
service = TestDummyService()
return service
@pytest.fixture()
def dummy_service(tmpdir, backups, raw_dummy_service) -> Service:
service = raw_dummy_service
# register our service
services.services.append(service)
# make sure we are in immediate mode because this thing is non pickleable to store on queue.
huey.immediate = True
assert huey.immediate is True
assert get_service_by_id(service.get_id()) is not None
yield service
# cleanup because apparently it matters wrt tasks
services.services.remove(service)
@pytest.fixture() @pytest.fixture()
def memory_backup() -> AbstractBackupProvider: def memory_backup() -> AbstractBackupProvider:
ProviderClass = providers.get_provider(BackupProvider.MEMORY) ProviderClass = providers.get_provider(BackupProvider.MEMORY)
@ -777,7 +744,7 @@ def simulated_service_stopping_delay(request) -> float:
def test_backup_service_task(backups, dummy_service, simulated_service_stopping_delay): def test_backup_service_task(backups, dummy_service, simulated_service_stopping_delay):
dummy_service.set_delay(simulated_service_stopping_delay) dummy_service.set_delay(simulated_service_stopping_delay)
handle = start_backup(dummy_service) handle = start_backup(dummy_service.get_id())
handle(blocking=True) handle(blocking=True)
snaps = Backups.get_snapshots(dummy_service) snaps = Backups.get_snapshots(dummy_service)
@ -822,7 +789,7 @@ def test_backup_larger_file(backups, dummy_service):
mega = 2**20 mega = 2**20
make_large_file(dir, 100 * mega) make_large_file(dir, 100 * mega)
handle = start_backup(dummy_service) handle = start_backup(dummy_service.get_id())
handle(blocking=True) handle(blocking=True)
# results will be slightly different on different machines. if someone has troubles with it on their machine, consider dropping this test. # results will be slightly different on different machines. if someone has troubles with it on their machine, consider dropping this test.

View file

@ -416,32 +416,37 @@ def lsblk_full_mock(mocker):
def test_get_block_devices(lsblk_full_mock, authorized_client): def test_get_block_devices(lsblk_full_mock, authorized_client):
block_devices = BlockDevices().get_block_devices() block_devices = BlockDevices().get_block_devices()
assert len(block_devices) == 2 assert len(block_devices) == 2
assert block_devices[0].name == "sda1" devices_by_name = {device.name: device for device in block_devices}
assert block_devices[0].path == "/dev/sda1" sda1 = devices_by_name["sda1"]
assert block_devices[0].fsavail == "4605702144" sdb = devices_by_name["sdb"]
assert block_devices[0].fssize == "19814920192"
assert block_devices[0].fstype == "ext4" assert sda1.name == "sda1"
assert block_devices[0].fsused == "14353719296" assert sda1.path == "/dev/sda1"
assert block_devices[0].mountpoints == ["/nix/store", "/"] assert sda1.fsavail == "4605702144"
assert block_devices[0].label is None assert sda1.fssize == "19814920192"
assert block_devices[0].uuid == "ec80c004-baec-4a2c-851d-0e1807135511" assert sda1.fstype == "ext4"
assert block_devices[0].size == "20210236928" assert sda1.fsused == "14353719296"
assert block_devices[0].model is None assert sda1.mountpoints == ["/nix/store", "/"]
assert block_devices[0].serial is None assert sda1.label is None
assert block_devices[0].type == "part" assert sda1.uuid == "ec80c004-baec-4a2c-851d-0e1807135511"
assert block_devices[1].name == "sdb" assert sda1.size == "20210236928"
assert block_devices[1].path == "/dev/sdb" assert sda1.model is None
assert block_devices[1].fsavail == "11888545792" assert sda1.serial is None
assert block_devices[1].fssize == "12573614080" assert sda1.type == "part"
assert block_devices[1].fstype == "ext4"
assert block_devices[1].fsused == "24047616" assert sdb.name == "sdb"
assert block_devices[1].mountpoints == ["/volumes/sdb"] assert sdb.path == "/dev/sdb"
assert block_devices[1].label is None assert sdb.fsavail == "11888545792"
assert block_devices[1].uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751" assert sdb.fssize == "12573614080"
assert block_devices[1].size == "12884901888" assert sdb.fstype == "ext4"
assert block_devices[1].model == "Volume" assert sdb.fsused == "24047616"
assert block_devices[1].serial == "21378102" assert sdb.mountpoints == ["/volumes/sdb"]
assert block_devices[1].type == "disk" assert sdb.label is None
assert sdb.uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751"
assert sdb.size == "12884901888"
assert sdb.model == "Volume"
assert sdb.serial == "21378102"
assert sdb.type == "disk"
def test_get_block_device(lsblk_full_mock, authorized_client): def test_get_block_device(lsblk_full_mock, authorized_client):
@ -506,3 +511,30 @@ def test_get_root_block_device(lsblk_full_mock, authorized_client):
assert block_device.model is None assert block_device.model is None
assert block_device.serial is None assert block_device.serial is None
assert block_device.type == "part" assert block_device.type == "part"
# Basic sanity check against the real system; this has actually failed before
def test_get_real_devices():
block_devices = BlockDevices().get_block_devices()
assert block_devices is not None
assert len(block_devices) > 0
# Basic sanity check against the real system
def test_get_real_root_device():
devices = BlockDevices().get_block_devices()
try:
block_device = BlockDevices().get_root_block_device()
except Exception as e:
raise Exception("cannot get root device:", e, "devices found:", devices)
assert block_device is not None
assert block_device.name is not None
assert block_device.name != ""
def test_get_real_root_device_raw(authorized_client):
block_device = BlockDevices().get_root_block_device()
assert block_device is not None
assert block_device.name is not None
assert block_device.name != ""


@ -1,6 +1,5 @@
# pylint: disable=redefined-outer-name # pylint: disable=redefined-outer-name
# pylint: disable=unused-argument # pylint: disable=unused-argument
import json
import os import os
import pytest import pytest

tests/test_dkim.py (new file, 60 lines)

@ -0,0 +1,60 @@
import pytest
import os
from os import path
from tests.conftest import global_data_dir
from selfprivacy_api.utils import get_dkim_key, get_domain
###############################################################################
DKIM_FILE_CONTENT = b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector for test-domain.tld\n'
@pytest.fixture
def dkim_file(mocker, domain_file, tmpdir):
domain = get_domain()
assert domain is not None
assert domain != ""
filename = domain + ".selector.txt"
dkim_path = path.join(tmpdir, filename)
with open(dkim_path, "wb") as file:
file.write(DKIM_FILE_CONTENT)
mocker.patch("selfprivacy_api.utils.DKIM_DIR", tmpdir)
return dkim_path
@pytest.fixture
def domain_file(mocker):
# TODO: move to conftest. Challenge: it does not behave with "/" like pytest datadir does
domain_path = path.join(global_data_dir(), "domain")
mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", domain_path)
return domain_path
@pytest.fixture
def no_dkim_file(dkim_file):
os.remove(dkim_file)
assert path.exists(dkim_file) is False
return dkim_file
###############################################################################
def test_get_dkim_key(domain_file, dkim_file):
"""Test DKIM key"""
dkim_key = get_dkim_key("test-domain.tld")
assert (
dkim_key
== "v=DKIM1; k=rsa; p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB"
)
def test_no_dkim_key(domain_file, no_dkim_file):
"""Test no DKIM key"""
dkim_key = get_dkim_key("test-domain.tld")
assert dkim_key is None
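The implementation of get_dkim_key is not part of this diff; purely as an illustration, one plausible way to reduce such a selector file to the bare TXT value is to join its quoted chunks. This is an assumption for the reader, not the project's actual parser, and the key material is abbreviated:

import re

# Abbreviated copy of the fixture content; the real key is longer.
RAW_DKIM_RECORD = (
    'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGf...AQAB" )'
    " ; ----- DKIM key selector for test-domain.tld\n"
)


def parse_dkim_record(raw: str) -> str:
    # Join the quoted chunks of the TXT record into one string.
    return "".join(re.findall(r'"([^"]*)"', raw))


print(parse_dkim_record(RAW_DKIM_RECORD))
# -> v=DKIM1; k=rsa; p=MIGf...AQAB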


@ -1,89 +0,0 @@
from tests.common import generate_api_query
from tests.conftest import TOKENS_FILE_CONTENTS, DEVICE_WE_AUTH_TESTS_WITH
ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"]
def assert_ok(response, request):
data = assert_data(response)
assert data[request]["success"] is True
assert data[request]["message"] is not None
assert data[request]["code"] == 200
def assert_errorcode(response, request, code):
data = assert_data(response)
assert data[request]["success"] is False
assert data[request]["message"] is not None
assert data[request]["code"] == code
def assert_empty(response):
assert response.status_code == 200
assert response.json().get("data") is None
def assert_data(response):
assert response.status_code == 200
data = response.json().get("data")
assert data is not None
assert "api" in data.keys()
return data["api"]
API_DEVICES_QUERY = """
devices {
creationDate
isCaller
name
}
"""
def request_devices(client):
return client.post(
"/graphql",
json={"query": generate_api_query([API_DEVICES_QUERY])},
)
def graphql_get_devices(client):
response = request_devices(client)
data = assert_data(response)
devices = data["devices"]
assert devices is not None
return devices
def set_client_token(client, token):
client.headers.update({"Authorization": "Bearer " + token})
def assert_token_valid(client, token):
set_client_token(client, token)
assert graphql_get_devices(client) is not None
def assert_same(graphql_devices, abstract_devices):
"""Orderless comparison"""
assert len(graphql_devices) == len(abstract_devices)
for original_device in abstract_devices:
assert original_device["name"] in [device["name"] for device in graphql_devices]
for device in graphql_devices:
if device["name"] == original_device["name"]:
assert device["creationDate"] == original_device["date"].isoformat()
def assert_original(client):
devices = graphql_get_devices(client)
assert_original_devices(devices)
def assert_original_devices(devices):
assert_same(devices, ORIGINAL_DEVICES)
for device in devices:
if device["name"] == DEVICE_WE_AUTH_TESTS_WITH["name"]:
assert device["isCaller"] is True
else:
assert device["isCaller"] is False


@ -4,18 +4,20 @@ from tests.conftest import TOKENS_FILE_CONTENTS, DEVICE_WE_AUTH_TESTS_WITH
ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"]
def assert_ok(response, request): def assert_ok(output: dict, code=200) -> None:
data = assert_data(response) if output["success"] is False:
data[request]["success"] is True # convenience for debugging, this should display error
data[request]["message"] is not None # if message is empty, consider adding helpful messages
data[request]["code"] == 200 raise ValueError(output["code"], output["message"])
assert output["success"] is True
assert output["message"] is not None
assert output["code"] == code
def assert_errorcode(response, request, code): def assert_errorcode(output: dict, code) -> None:
data = assert_data(response) assert output["success"] is False
data[request]["success"] is False assert output["message"] is not None
data[request]["message"] is not None assert output["code"] == code
data[request]["code"] == code
def assert_empty(response): def assert_empty(response):
@ -23,9 +25,15 @@ def assert_empty(response):
assert response.json().get("data") is None assert response.json().get("data") is None
def assert_data(response): def get_data(response):
assert response.status_code == 200 assert response.status_code == 200
data = response.json().get("data") response = response.json()
if (
"errors" in response.keys()
): # convenience for debugging, this will display error
raise ValueError(response["errors"])
data = response.get("data")
assert data is not None assert data is not None
return data return data
@ -48,7 +56,7 @@ def request_devices(client):
def graphql_get_devices(client): def graphql_get_devices(client):
response = request_devices(client) response = request_devices(client)
data = assert_data(response) data = get_data(response)
devices = data["api"]["devices"] devices = data["api"]["devices"]
assert devices is not None assert devices is not None
return devices return devices
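With this refactor the assertion helpers operate on the already-unwrapped mutation payload rather than on the HTTP response. A minimal self-contained check of how they behave on plain dicts (assuming the module is importable as tests.test_graphql.common in the test environment):

from tests.test_graphql.common import assert_ok, assert_errorcode

# A successful mutation payload, as produced by get_data(response)["api"][...]:
ok_output = {"success": True, "message": "ok", "code": 200}
assert_ok(ok_output)  # passes; defaults to expecting code == 200

# An expected failure with a specific code:
bad_output = {"success": False, "message": "not found", "code": 404}
assert_errorcode(bad_output, 404)  # passes: failure with the expected code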


@ -1,5 +1,5 @@
from os import path from os import path
from tests.test_graphql.test_backup import dummy_service, backups, raw_dummy_service from tests.test_backup import backups
from tests.common import generate_backup_query from tests.common import generate_backup_query
@ -279,7 +279,7 @@ def get_data(response):
if ( if (
"errors" in response.keys() "errors" in response.keys()
): # convenience for debugging, this will display error ): # convenience for debugging, this will display error
assert response["errors"] == [] raise ValueError(response["errors"])
assert response["data"] is not None assert response["data"] is not None
data = response["data"] data = response["data"]
return data return data
@ -301,7 +301,7 @@ def test_dummy_service_convertible_to_gql(dummy_service):
assert gql_service is not None assert gql_service is not None
def test_snapshots_empty(authorized_client, dummy_service): def test_snapshots_empty(authorized_client, dummy_service, backups):
snaps = api_snapshots(authorized_client) snaps = api_snapshots(authorized_client)
assert snaps == [] assert snaps == []


@ -8,8 +8,8 @@ from tests.common import (
generate_api_query, generate_api_query,
) )
from tests.conftest import DEVICE_WE_AUTH_TESTS_WITH, TOKENS_FILE_CONTENTS from tests.conftest import DEVICE_WE_AUTH_TESTS_WITH, TOKENS_FILE_CONTENTS
from tests.test_graphql.api_common import ( from tests.test_graphql.common import (
assert_data, get_data,
assert_empty, assert_empty,
assert_ok, assert_ok,
assert_errorcode, assert_errorcode,
@ -36,7 +36,7 @@ def graphql_get_new_device_key(authorized_client) -> str:
"/graphql", "/graphql",
json={"query": NEW_DEVICE_KEY_MUTATION}, json={"query": NEW_DEVICE_KEY_MUTATION},
) )
assert_ok(response, "getNewDeviceApiKey") assert_ok(get_data(response)["api"]["getNewDeviceApiKey"])
key = response.json()["data"]["api"]["getNewDeviceApiKey"]["key"] key = response.json()["data"]["api"]["getNewDeviceApiKey"]["key"]
assert key.split(" ").__len__() == 12 assert key.split(" ").__len__() == 12
@ -60,9 +60,10 @@ def graphql_try_auth_new_device(client, mnemonic_key, device_name):
def graphql_authorize_new_device(client, mnemonic_key, device_name) -> str: def graphql_authorize_new_device(client, mnemonic_key, device_name) -> str:
response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") response = graphql_try_auth_new_device(client, mnemonic_key, "new_device")
assert_ok(response, "authorizeWithNewDeviceApiKey") assert_ok(get_data(response)["api"]["authorizeWithNewDeviceApiKey"])
token = response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["token"] token = response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["token"]
assert_token_valid(client, token) assert_token_valid(client, token)
return token
def test_graphql_tokens_info(authorized_client, tokens_file): def test_graphql_tokens_info(authorized_client, tokens_file):
@ -114,7 +115,7 @@ def test_graphql_delete_token(authorized_client, tokens_file):
}, },
}, },
) )
assert_ok(response, "deleteDeviceApiToken") assert_ok(get_data(response)["api"]["deleteDeviceApiToken"])
devices = graphql_get_devices(authorized_client) devices = graphql_get_devices(authorized_client)
assert_same(devices, test_devices) assert_same(devices, test_devices)
@ -130,7 +131,7 @@ def test_graphql_delete_self_token(authorized_client, tokens_file):
}, },
}, },
) )
assert_errorcode(response, "deleteDeviceApiToken", 400) assert_errorcode(get_data(response)["api"]["deleteDeviceApiToken"], 400)
assert_original(authorized_client) assert_original(authorized_client)
@ -147,7 +148,7 @@ def test_graphql_delete_nonexistent_token(
}, },
}, },
) )
assert_errorcode(response, "deleteDeviceApiToken", 404) assert_errorcode(get_data(response)["api"]["deleteDeviceApiToken"], 404)
assert_original(authorized_client) assert_original(authorized_client)
@ -180,7 +181,7 @@ def test_graphql_refresh_token(authorized_client, client, tokens_file):
"/graphql", "/graphql",
json={"query": REFRESH_TOKEN_MUTATION}, json={"query": REFRESH_TOKEN_MUTATION},
) )
assert_ok(response, "refreshDeviceApiToken") assert_ok(get_data(response)["api"]["refreshDeviceApiToken"])
new_token = response.json()["data"]["api"]["refreshDeviceApiToken"]["token"] new_token = response.json()["data"]["api"]["refreshDeviceApiToken"]["token"]
assert_token_valid(client, new_token) assert_token_valid(client, new_token)
@ -250,10 +251,10 @@ def test_graphql_get_and_delete_new_device_key(client, authorized_client, tokens
"/graphql", "/graphql",
json={"query": INVALIDATE_NEW_DEVICE_KEY_MUTATION}, json={"query": INVALIDATE_NEW_DEVICE_KEY_MUTATION},
) )
assert_ok(response, "invalidateNewDeviceApiKey") assert_ok(get_data(response)["api"]["invalidateNewDeviceApiKey"])
response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") response = graphql_try_auth_new_device(client, mnemonic_key, "new_device")
assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404)
AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """ AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """
@ -285,7 +286,7 @@ def test_graphql_authorize_new_device_with_invalid_key(
client, authorized_client, tokens_file client, authorized_client, tokens_file
): ):
response = graphql_try_auth_new_device(client, "invalid_token", "new_device") response = graphql_try_auth_new_device(client, "invalid_token", "new_device")
assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404)
assert_original(authorized_client) assert_original(authorized_client)
@ -297,7 +298,7 @@ def test_graphql_get_and_authorize_used_key(client, authorized_client, tokens_fi
devices = graphql_get_devices(authorized_client) devices = graphql_get_devices(authorized_client)
response = graphql_try_auth_new_device(client, mnemonic_key, "new_device2") response = graphql_try_auth_new_device(client, mnemonic_key, "new_device2")
assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404)
assert graphql_get_devices(authorized_client) == devices assert graphql_get_devices(authorized_client) == devices
@ -309,7 +310,7 @@ def test_graphql_get_and_authorize_key_after_12_minutes(
mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, NearFuture) mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, NearFuture)
response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") response = graphql_try_auth_new_device(client, mnemonic_key, "new_device")
assert_errorcode(response, "authorizeWithNewDeviceApiKey", 404) assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404)
def test_graphql_authorize_without_token( def test_graphql_authorize_without_token(


@ -18,9 +18,9 @@ from tests.common import five_minutes_into_future_naive_utc as five_minutes_into
from tests.common import five_minutes_into_future as five_minutes_into_future_tz from tests.common import five_minutes_into_future as five_minutes_into_future_tz
from tests.common import five_minutes_into_past_naive_utc as five_minutes_into_past from tests.common import five_minutes_into_past_naive_utc as five_minutes_into_past
from tests.test_graphql.api_common import ( from tests.test_graphql.common import (
assert_empty, assert_empty,
assert_data, get_data,
assert_ok, assert_ok,
assert_errorcode, assert_errorcode,
assert_token_valid, assert_token_valid,
@ -49,9 +49,9 @@ def request_recovery_status(client):
def graphql_recovery_status(client): def graphql_recovery_status(client):
response = request_recovery_status(client) response = request_recovery_status(client)
data = assert_data(response) data = get_data(response)
status = data["recoveryKey"] status = data["api"]["recoveryKey"]
assert status is not None assert status is not None
return status return status
@ -74,8 +74,10 @@ def request_make_new_recovery_key(client, expires_at=None, uses=None):
def graphql_make_new_recovery_key(client, expires_at=None, uses=None): def graphql_make_new_recovery_key(client, expires_at=None, uses=None):
response = request_make_new_recovery_key(client, expires_at, uses) response = request_make_new_recovery_key(client, expires_at, uses)
assert_ok(response, "getNewRecoveryApiKey") output = get_data(response)["api"]["getNewRecoveryApiKey"]
key = response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] assert_ok(output)
key = output["key"]
assert key is not None assert key is not None
assert key.split(" ").__len__() == 18 assert key.split(" ").__len__() == 18
return key return key
@ -98,8 +100,10 @@ def request_recovery_auth(client, key, device_name):
def graphql_use_recovery_key(client, key, device_name): def graphql_use_recovery_key(client, key, device_name):
response = request_recovery_auth(client, key, device_name) response = request_recovery_auth(client, key, device_name)
assert_ok(response, "useRecoveryApiKey") output = get_data(response)["api"]["useRecoveryApiKey"]
token = response.json()["data"]["api"]["useRecoveryApiKey"]["token"] assert_ok(output)
token = output["token"]
assert token is not None assert token is not None
assert_token_valid(client, token) assert_token_valid(client, token)
set_client_token(client, token) set_client_token(client, token)
@ -198,8 +202,10 @@ def test_graphql_use_recovery_key_after_expiration(
mock = mocker.patch(RECOVERY_KEY_VALIDATION_DATETIME, NearFuture) mock = mocker.patch(RECOVERY_KEY_VALIDATION_DATETIME, NearFuture)
response = request_recovery_auth(client, key, "new_test_token3") response = request_recovery_auth(client, key, "new_test_token3")
assert_errorcode(response, "useRecoveryApiKey", 404) output = get_data(response)["api"]["useRecoveryApiKey"]
assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is None assert_errorcode(output, 404)
assert output["token"] is None
assert_original(authorized_client) assert_original(authorized_client)
status = graphql_recovery_status(authorized_client) status = graphql_recovery_status(authorized_client)
@ -222,8 +228,10 @@ def test_graphql_generate_recovery_key_with_expiration_in_the_past(
authorized_client, expires_at=expiration_date authorized_client, expires_at=expiration_date
) )
assert_errorcode(response, "getNewRecoveryApiKey", 400) output = get_data(response)["api"]["getNewRecoveryApiKey"]
assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None assert_errorcode(output, 400)
assert output["key"] is None
assert graphql_recovery_status(authorized_client)["exists"] is False assert graphql_recovery_status(authorized_client)["exists"] is False
@ -280,7 +288,8 @@ def test_graphql_generate_recovery_key_with_limited_uses(
assert status["usesLeft"] == 0 assert status["usesLeft"] == 0
response = request_recovery_auth(client, mnemonic_key, "new_test_token3") response = request_recovery_auth(client, mnemonic_key, "new_test_token3")
assert_errorcode(response, "useRecoveryApiKey", 404) output = get_data(response)["api"]["useRecoveryApiKey"]
assert_errorcode(output, 404)
def test_graphql_generate_recovery_key_with_negative_uses( def test_graphql_generate_recovery_key_with_negative_uses(
@ -288,13 +297,16 @@ def test_graphql_generate_recovery_key_with_negative_uses(
): ):
response = request_make_new_recovery_key(authorized_client, uses=-1) response = request_make_new_recovery_key(authorized_client, uses=-1)
assert_errorcode(response, "getNewRecoveryApiKey", 400) output = get_data(response)["api"]["getNewRecoveryApiKey"]
assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None assert_errorcode(output, 400)
assert output["key"] is None
assert graphql_recovery_status(authorized_client)["exists"] is False
def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file):
response = request_make_new_recovery_key(authorized_client, uses=0) response = request_make_new_recovery_key(authorized_client, uses=0)
assert_errorcode(response, "getNewRecoveryApiKey", 400) output = get_data(response)["api"]["getNewRecoveryApiKey"]
assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None assert_errorcode(output, 400)
assert output["key"] is None
assert graphql_recovery_status(authorized_client)["exists"] is False assert graphql_recovery_status(authorized_client)["exists"] is False


@ -0,0 +1,517 @@
import pytest
from typing import Generator
from selfprivacy_api.utils.block_devices import BlockDevices
import selfprivacy_api.services as service_module
from selfprivacy_api.services import get_service_by_id
from selfprivacy_api.services.service import Service, ServiceStatus
from selfprivacy_api.services.test_service import DummyService
from tests.common import generate_service_query
from tests.test_graphql.common import assert_empty, assert_ok, get_data
@pytest.fixture()
def only_dummy_service(dummy_service) -> Generator[DummyService, None, None]:
# because queries to services that are not really there error out
back_copy = service_module.services.copy()
service_module.services.clear()
service_module.services.append(dummy_service)
yield dummy_service
service_module.services.clear()
service_module.services.extend(back_copy)
API_START_MUTATION = """
mutation TestStartService($service_id: String!) {
services {
startService(serviceId: $service_id) {
success
message
code
service {
id
status
}
}
}
}
"""
API_RESTART_MUTATION = """
mutation TestRestartService($service_id: String!) {
services {
restartService(serviceId: $service_id) {
success
message
code
service {
id
status
}
}
}
}
"""
API_ENABLE_MUTATION = """
mutation TestStartService($service_id: String!) {
services {
enableService(serviceId: $service_id) {
success
message
code
service {
id
isEnabled
}
}
}
}
"""
API_DISABLE_MUTATION = """
mutation TestStartService($service_id: String!) {
services {
disableService(serviceId: $service_id) {
success
message
code
service {
id
isEnabled
}
}
}
}
"""
API_STOP_MUTATION = """
mutation TestStopService($service_id: String!) {
services {
stopService(serviceId: $service_id) {
success
message
code
service {
id
status
}
}
}
}
"""
API_SERVICES_QUERY = """
allServices {
id
status
isEnabled
}
"""
API_MOVE_MUTATION = """
mutation TestMoveService($input: MoveServiceInput!) {
services {
moveService(input: $input) {
success
message
code
job {
uid
status
}
service {
id
status
}
}
}
}
"""
def assert_notfound(data):
assert_errorcode(data, 404)
def assert_errorcode(data, errorcode):
assert data["code"] == errorcode
assert data["success"] is False
assert data["message"] is not None
def api_enable(client, service: Service) -> dict:
return api_enable_by_name(client, service.get_id())
def api_enable_by_name(client, service_id: str) -> dict:
response = client.post(
"/graphql",
json={
"query": API_ENABLE_MUTATION,
"variables": {"service_id": service_id},
},
)
return response
def api_disable(client, service: Service) -> dict:
return api_disable_by_name(client, service.get_id())
def api_disable_by_name(client, service_id: str) -> dict:
response = client.post(
"/graphql",
json={
"query": API_DISABLE_MUTATION,
"variables": {"service_id": service_id},
},
)
return response
def api_start(client, service: Service) -> dict:
return api_start_by_name(client, service.get_id())
def api_start_by_name(client, service_id: str) -> dict:
response = client.post(
"/graphql",
json={
"query": API_START_MUTATION,
"variables": {"service_id": service_id},
},
)
return response
def api_move(client, service: Service, location: str) -> dict:
return api_move_by_name(client, service.get_id(), location)
def api_move_by_name(client, service_id: str, location: str) -> dict:
response = client.post(
"/graphql",
json={
"query": API_MOVE_MUTATION,
"variables": {
"input": {
"serviceId": service_id,
"location": location,
}
},
},
)
return response
def api_restart(client, service: Service) -> dict:
return api_restart_by_name(client, service.get_id())
def api_restart_by_name(client, service_id: str) -> dict:
response = client.post(
"/graphql",
json={
"query": API_RESTART_MUTATION,
"variables": {"service_id": service_id},
},
)
return response
def api_stop(client, service: Service) -> dict:
return api_stop_by_name(client, service.get_id())
def api_stop_by_name(client, service_id: str) -> dict:
response = client.post(
"/graphql",
json={
"query": API_STOP_MUTATION,
"variables": {"service_id": service_id},
},
)
return response
def api_all_services(authorized_client):
response = api_all_services_raw(authorized_client)
data = get_data(response)
result = data["services"]["allServices"]
assert result is not None
return result
def api_all_services_raw(client):
return client.post(
"/graphql",
json={"query": generate_service_query([API_SERVICES_QUERY])},
)
def api_service(authorized_client, service: Service):
id = service.get_id()
for _service in api_all_services(authorized_client):
if _service["id"] == id:
return _service
def test_get_services(authorized_client, only_dummy_service):
services = api_all_services(authorized_client)
assert len(services) == 1
api_dummy_service = services[0]
assert api_dummy_service["id"] == "testservice"
assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value
assert api_dummy_service["isEnabled"] is True
def test_enable_return_value(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
mutation_response = api_enable(authorized_client, dummy_service)
data = get_data(mutation_response)["services"]["enableService"]
assert_ok(data)
service = data["service"]
assert service["id"] == dummy_service.get_id()
assert service["isEnabled"] == True
def test_disable_return_value(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
mutation_response = api_disable(authorized_client, dummy_service)
data = get_data(mutation_response)["services"]["disableService"]
assert_ok(data)
service = data["service"]
assert service["id"] == dummy_service.get_id()
assert service["isEnabled"] == False
def test_start_return_value(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
mutation_response = api_start(authorized_client, dummy_service)
data = get_data(mutation_response)["services"]["startService"]
assert_ok(data)
service = data["service"]
assert service["id"] == dummy_service.get_id()
assert service["status"] == ServiceStatus.ACTIVE.value
def test_restart(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
dummy_service.set_delay(0.3)
mutation_response = api_restart(authorized_client, dummy_service)
data = get_data(mutation_response)["services"]["restartService"]
assert_ok(data)
service = data["service"]
assert service["id"] == dummy_service.get_id()
assert service["status"] == ServiceStatus.RELOADING.value
def test_stop_return_value(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
mutation_response = api_stop(authorized_client, dummy_service)
data = get_data(mutation_response)["services"]["stopService"]
assert_ok(data)
service = data["service"]
assert service["id"] == dummy_service.get_id()
assert service["status"] == ServiceStatus.INACTIVE.value
def test_allservices_unauthorized(client, only_dummy_service):
dummy_service = only_dummy_service
response = api_all_services_raw(client)
assert response.status_code == 200
assert response.json().get("data") is None
def test_start_unauthorized(client, only_dummy_service):
dummy_service = only_dummy_service
response = api_start(client, dummy_service)
assert_empty(response)
def test_restart_unauthorized(client, only_dummy_service):
dummy_service = only_dummy_service
response = api_restart(client, dummy_service)
assert_empty(response)
def test_stop_unauthorized(client, only_dummy_service):
dummy_service = only_dummy_service
response = api_stop(client, dummy_service)
assert_empty(response)
def test_enable_unauthorized(client, only_dummy_service):
dummy_service = only_dummy_service
response = api_enable(client, dummy_service)
assert_empty(response)
def test_disable_unauthorized(client, only_dummy_service):
dummy_service = only_dummy_service
response = api_disable(client, dummy_service)
assert_empty(response)
def test_move_unauthorized(client, only_dummy_service):
dummy_service = only_dummy_service
response = api_move(client, dummy_service, "sda1")
assert_empty(response)
def test_start_nonexistent(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
mutation_response = api_start_by_name(authorized_client, "bogus_service")
data = get_data(mutation_response)["services"]["startService"]
assert_notfound(data)
assert data["service"] is None
def test_restart_nonexistent(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
mutation_response = api_restart_by_name(authorized_client, "bogus_service")
data = get_data(mutation_response)["services"]["restartService"]
assert_notfound(data)
assert data["service"] is None
def test_stop_nonexistent(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
mutation_response = api_stop_by_name(authorized_client, "bogus_service")
data = get_data(mutation_response)["services"]["stopService"]
assert_notfound(data)
assert data["service"] is None
def test_enable_nonexistent(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
mutation_response = api_enable_by_name(authorized_client, "bogus_service")
data = get_data(mutation_response)["services"]["enableService"]
assert_notfound(data)
assert data["service"] is None
def test_disable_nonexistent(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
mutation_response = api_disable_by_name(authorized_client, "bogus_service")
data = get_data(mutation_response)["services"]["disableService"]
assert_notfound(data)
assert data["service"] is None
def test_stop_start(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
api_dummy_service = api_all_services(authorized_client)[0]
assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value
# attempting to start an already started service
api_start(authorized_client, dummy_service)
api_dummy_service = api_all_services(authorized_client)[0]
assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value
api_stop(authorized_client, dummy_service)
api_dummy_service = api_all_services(authorized_client)[0]
assert api_dummy_service["status"] == ServiceStatus.INACTIVE.value
# attempting to stop an already stopped service
api_stop(authorized_client, dummy_service)
api_dummy_service = api_all_services(authorized_client)[0]
assert api_dummy_service["status"] == ServiceStatus.INACTIVE.value
api_start(authorized_client, dummy_service)
api_dummy_service = api_all_services(authorized_client)[0]
assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value
def test_disable_enable(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
api_dummy_service = api_all_services(authorized_client)[0]
assert api_dummy_service["isEnabled"] is True
# attempting to enable an already enabled service
api_enable(authorized_client, dummy_service)
api_dummy_service = api_all_services(authorized_client)[0]
assert api_dummy_service["isEnabled"] is True
assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value
api_disable(authorized_client, dummy_service)
api_dummy_service = api_all_services(authorized_client)[0]
assert api_dummy_service["isEnabled"] is False
assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value
# attempting to disable an already disabled service
api_disable(authorized_client, dummy_service)
api_dummy_service = api_all_services(authorized_client)[0]
assert api_dummy_service["isEnabled"] is False
assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value
api_enable(authorized_client, dummy_service)
api_dummy_service = api_all_services(authorized_client)[0]
assert api_dummy_service["isEnabled"] is True
assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value
def test_move_immovable(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
dummy_service.set_movable(False)
mutation_response = api_move(authorized_client, dummy_service, "sda1")
data = get_data(mutation_response)["services"]["moveService"]
assert_errorcode(data, 400)
# is there any point in returning the service here?
assert data["service"] is not None
assert data["job"] is None
def test_move_no_such_volume(authorized_client, only_dummy_service):
dummy_service = only_dummy_service
mutation_response = api_move(authorized_client, dummy_service, "bogus_volume")
data = get_data(mutation_response)["services"]["moveService"]
assert_notfound(data)
# is there any point in returning the service here?
assert data["service"] is not None
assert data["job"] is None
def test_move_same_volume(authorized_client, dummy_service):
# dummy_service = only_dummy_service
# we need a drive that actually exists
root_volume = BlockDevices().get_root_block_device()
dummy_service.set_simulated_moves(False)
dummy_service.set_drive(root_volume.name)
mutation_response = api_move(authorized_client, dummy_service, root_volume.name)
data = get_data(mutation_response)["services"]["moveService"]
assert_errorcode(data, 400)
# is there any point in returning the service here?
assert data["service"] is not None
assert data["job"] is not None
def test_mailservice_cannot_enable_disable(authorized_client):
mailservice = get_service_by_id("email")
mutation_response = api_enable(authorized_client, mailservice)
data = get_data(mutation_response)["services"]["enableService"]
assert_errorcode(data, 400)
# TODO?: we cannot convert mailservice to graphql Service without /var/domain yet
# assert data["service"] is not None
mutation_response = api_disable(authorized_client, mailservice)
data = get_data(mutation_response)["services"]["disableService"]
assert_errorcode(data, 400)
# assert data["service"] is not None
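The helpers in this file compose in a uniform request/unwrap/assert pattern. A hypothetical additional case, shown only to illustrate that pattern (not part of the changeset):

# Hypothetical extra case built from the helpers defined above.
def test_lookup_single_service(authorized_client, only_dummy_service):
    entry = api_service(authorized_client, only_dummy_service)
    assert entry is not None
    assert entry["id"] == only_dummy_service.get_id()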


@ -1,9 +1,25 @@
# pylint: disable=redefined-outer-name # pylint: disable=redefined-outer-name
# pylint: disable=unused-argument # pylint: disable=unused-argument
import pytest import pytest
from typing import Optional
from tests.common import read_json from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations
from tests.test_graphql.common import assert_empty from selfprivacy_api.graphql.queries.system import System
# only allowed in fixtures and utils
from selfprivacy_api.actions.ssh import remove_ssh_key, get_ssh_settings
from selfprivacy_api.actions.users import get_users, UserDataUserOrigin
from tests.common import read_json, generate_system_query, generate_users_query
from tests.test_graphql.common import (
assert_empty,
assert_ok,
get_data,
assert_errorcode,
)
from tests.test_graphql.test_users import API_USERS_INFO
key_users = ["root", "tester", "user1", "user2", "user3"]
class ProcessMock: class ProcessMock:
@ -13,7 +29,7 @@ class ProcessMock:
self.args = args self.args = args
self.kwargs = kwargs self.kwargs = kwargs
def communicate(): # pylint: disable=no-method-argument def communicate(self): # pylint: disable=no-method-argument
return (b"NEW_HASHED", None) return (b"NEW_HASHED", None)
returncode = 0 returncode = 0
@ -40,7 +56,56 @@ def some_users(mocker, datadir):
return datadir return datadir
# TESTS ######################################################## @pytest.fixture
def no_rootkeys(generic_userdata):
for rootkey in get_ssh_settings().rootKeys:
remove_ssh_key("root", rootkey)
assert get_ssh_settings().rootKeys == []
@pytest.fixture
def no_keys(generic_userdata):
# this removes root and admin keys too
users = get_users()
for user in users:
for key in user.ssh_keys:
remove_ssh_key(user.username, key)
users = get_users()
for user in users:
assert user.ssh_keys == []
@pytest.fixture
def no_admin_key(generic_userdata, authorized_client):
admin_keys = api_get_user_keys(authorized_client, admin_name())
for admin_key in admin_keys:
remove_ssh_key(admin_name(), admin_key)
assert api_get_user_keys(authorized_client, admin_name()) == []
def admin_name() -> Optional[str]:
users = get_users()
for user in users:
if user.origin == UserDataUserOrigin.PRIMARY:
return user.username
return None
def api_get_user_keys(authorized_client, user: str):
response = authorized_client.post(
"/graphql",
json={
"query": generate_users_query([API_USERS_INFO]),
},
)
data = get_data(response)["users"]["allUsers"]
for _user in data:
if _user["username"] == user:
return _user["sshKeys"]
return None
API_CREATE_SSH_KEY_MUTATION = """ API_CREATE_SSH_KEY_MUTATION = """
@ -59,6 +124,250 @@ mutation addSshKey($sshInput: SshMutationInput!) {
} }
""" """
API_SET_SSH_SETTINGS = """
mutation enableSsh($settings: SSHSettingsInput!) {
system {
changeSshSettings(settings: $settings) {
success
message
code
enable
passwordAuthentication
}
}
}
"""
API_SSH_SETTINGS_QUERY = """
settings {
ssh {
enable
passwordAuthentication
}
}
"""
API_ROOTKEYS_QUERY = """
settings {
ssh {
rootSshKeys
}
}
"""
def api_ssh_settings_raw(client):
return client.post(
"/graphql",
json={"query": generate_system_query([API_SSH_SETTINGS_QUERY])},
)
def api_rootkeys_raw(client):
return client.post(
"/graphql",
json={"query": generate_system_query([API_ROOTKEYS_QUERY])},
)
def api_add_ssh_key(authorized_client, user: str, key: str):
response = authorized_client.post(
"/graphql",
json={
"query": API_CREATE_SSH_KEY_MUTATION,
"variables": {
"sshInput": {
"username": user,
"sshKey": key,
},
},
},
)
data = get_data(response)
result = data["users"]["addSshKey"]
assert result is not None
return result
def api_remove_ssh_key(authorized_client, user: str, key: str):
response = authorized_client.post(
"/graphql",
json={
"query": API_REMOVE_SSH_KEY_MUTATION,
"variables": {
"sshInput": {
"username": user,
"sshKey": key,
},
},
},
)
data = get_data(response)
result = data["users"]["removeSshKey"]
assert result is not None
return result
def api_rootkeys(authorized_client):
response = api_rootkeys_raw(authorized_client)
data = get_data(response)
result = data["system"]["settings"]["ssh"]["rootSshKeys"]
assert result is not None
return result
def api_ssh_settings(authorized_client):
response = api_ssh_settings_raw(authorized_client)
data = get_data(response)
result = data["system"]["settings"]["ssh"]
assert result is not None
return result
def api_set_ssh_settings_dict(authorized_client, dict):
response = authorized_client.post(
"/graphql",
json={
"query": API_SET_SSH_SETTINGS,
"variables": {
"settings": dict,
},
},
)
data = get_data(response)
result = data["system"]["changeSshSettings"]
assert result is not None
return result
def api_set_ssh_settings(authorized_client, enable: bool, password_auth: bool):
return api_set_ssh_settings_dict(
authorized_client,
{
"enable": enable,
"passwordAuthentication": password_auth,
},
)
# TESTS ########################################################
def test_graphql_ssh_query(authorized_client, some_users):
settings = api_ssh_settings(authorized_client)
assert settings["enable"] is True
assert settings["passwordAuthentication"] is True
def test_graphql_get_ssh_settings_unauthorized(client, some_users):
response = api_ssh_settings_raw(client)
assert_empty(response)
def test_graphql_change_ssh_settings_unauthorized(client, some_users):
response = client.post(
"/graphql",
json={
"query": API_SET_SSH_SETTINGS,
"variables": {
"sshInput": {
"enable": True,
"passwordAuthentication": True,
},
},
},
)
assert_empty(response)
def assert_includes(smaller_dict: dict, bigger_dict: dict):
for item in smaller_dict.items():
assert item in bigger_dict.items()
available_settings = [
{"enable": True, "passwordAuthentication": True},
{"enable": True, "passwordAuthentication": False},
{"enable": False, "passwordAuthentication": True},
{"enable": False, "passwordAuthentication": False},
]
original_settings = [
{"enable": True, "passwordAuthentication": True},
{"enable": True, "passwordAuthentication": False},
{"enable": False, "passwordAuthentication": True},
{"enable": False, "passwordAuthentication": False},
]
@pytest.mark.parametrize("original_settings", original_settings)
@pytest.mark.parametrize("settings", available_settings)
def test_graphql_readwrite_ssh_settings(
authorized_client, some_users, settings, original_settings
):
# Userdata-related tests like undefined fields are in actions-level tests.
output = api_set_ssh_settings_dict(authorized_client, original_settings)
assert_includes(api_ssh_settings(authorized_client), output)
output = api_set_ssh_settings_dict(authorized_client, settings)
assert_ok(output)
assert_includes(settings, output)
if "enable" not in settings.keys():
assert output["enable"] == original_settings["enable"]
assert_includes(api_ssh_settings(authorized_client), output)
forbidden_settings = [
# we include this here so that if the next version makes the fields
# optional, the tests will remind the person that tests are to be extended accordingly
{"enable": True},
{"passwordAuthentication": True},
]
@pytest.mark.parametrize("original_settings", original_settings)
@pytest.mark.parametrize("settings", forbidden_settings)
def test_graphql_readwrite_ssh_settings_partial(
authorized_client, some_users, settings, original_settings
):
output = api_set_ssh_settings_dict(authorized_client, original_settings)
with pytest.raises(Exception):
output = api_set_ssh_settings_dict(authorized_client, settings)
def test_graphql_disable_twice(authorized_client, some_users):
output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False)
assert_ok(output)
assert output["enable"] is False
assert output["passwordAuthentication"] is False
output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False)
assert_ok(output)
assert output["enable"] is False
assert output["passwordAuthentication"] is False
def test_graphql_enable_twice(authorized_client, some_users):
output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True)
assert_ok(output)
assert output["enable"] is True
assert output["passwordAuthentication"] is True
assert_includes(api_ssh_settings(authorized_client), output)
output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True)
assert_ok(output)
assert output["enable"] is True
assert output["passwordAuthentication"] is True
assert_includes(api_ssh_settings(authorized_client), output)
############## KEYS
def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_popen): def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_popen):
response = client.post( response = client.post(
@ -76,106 +385,81 @@ def test_graphql_add_ssh_key_unauthorized(client, some_users, mock_subprocess_po
assert_empty(response) assert_empty(response)
def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_popen): # Unauth getting of keys is tested in test_users.py because it is a part of users interface
response = authorized_client.post(
"/graphql",
json={
"query": API_CREATE_SSH_KEY_MUTATION,
"variables": {
"sshInput": {
"username": "user1",
"sshKey": "ssh-rsa KEY test_key@pc",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["addSshKey"]["code"] == 201
assert response.json()["data"]["users"]["addSshKey"]["message"] is not None
assert response.json()["data"]["users"]["addSshKey"]["success"] is True
assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "user1" def test_graphql_get_root_key(authorized_client, some_users):
assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ assert api_rootkeys(authorized_client) == ["ssh-ed25519 KEY test@pc"]
"ssh-rsa KEY user1@pc",
def test_graphql_get_root_key_when_none(authorized_client, no_rootkeys):
assert api_rootkeys(authorized_client) == []
# Getting admin keys when they are present is tested in test_users.py
def test_get_admin_key_when_none(authorized_client, no_admin_key):
assert api_get_user_keys(authorized_client, admin_name()) == []
@pytest.mark.parametrize("user", key_users)
def test_graphql_add_ssh_key_when_none(authorized_client, no_keys, user):
key1 = "ssh-rsa KEY test_key@pc"
if user == "root":
assert api_rootkeys(authorized_client) == []
else:
assert api_get_user_keys(authorized_client, user) == []
output = api_add_ssh_key(authorized_client, user, key1)
assert_ok(output, code=201)
assert output["user"]["username"] == user
assert output["user"]["sshKeys"] == [key1]
if user == "root":
assert api_rootkeys(authorized_client) == [key1]
else:
assert api_get_user_keys(authorized_client, user) == [key1]
@pytest.mark.parametrize("user", key_users)
def test_graphql_add_ssh_key_one_more(authorized_client, no_keys, user):
keys = [
"ssh-rsa KEY test_key@pc", "ssh-rsa KEY test_key@pc",
"ssh-rsa KEY2 test_key@pc",
] ]
output = api_add_ssh_key(authorized_client, user, keys[0])
assert output["user"]["sshKeys"] == [keys[0]]
output = api_add_ssh_key(authorized_client, user, keys[1])
assert_ok(output, code=201)
assert output["user"]["username"] == user
assert output["user"]["sshKeys"] == keys
if user == "root":
assert api_rootkeys(authorized_client) == keys
else:
assert api_get_user_keys(authorized_client, user) == keys
def test_graphql_add_root_ssh_key(authorized_client, some_users, mock_subprocess_popen): @pytest.mark.parametrize("user", key_users)
response = authorized_client.post( def test_graphql_add_ssh_key_same(authorized_client, no_keys, user):
"/graphql", key = "ssh-rsa KEY test_key@pc"
json={ output = api_add_ssh_key(authorized_client, user, key)
"query": API_CREATE_SSH_KEY_MUTATION, assert output["user"]["sshKeys"] == [key]
"variables": {
"sshInput": {
"username": "root",
"sshKey": "ssh-rsa KEY test_key@pc",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["addSshKey"]["code"] == 201 output = api_add_ssh_key(authorized_client, user, key)
assert response.json()["data"]["users"]["addSshKey"]["message"] is not None assert_errorcode(output, 409)
assert response.json()["data"]["users"]["addSshKey"]["success"] is True
assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "root"
assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [
"ssh-ed25519 KEY test@pc",
"ssh-rsa KEY test_key@pc",
]
def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess_popen): @pytest.mark.parametrize("user", key_users)
response = authorized_client.post( def test_graphql_add_bad_ssh_key(authorized_client, some_users, user):
"/graphql", output = api_add_ssh_key(authorized_client, user, "trust me, this is the ssh key")
json={ assert_errorcode(output, 400)
"query": API_CREATE_SSH_KEY_MUTATION,
"variables": {
"sshInput": {
"username": "tester",
"sshKey": "ssh-rsa KEY test_key@pc",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["addSshKey"]["code"] == 201
assert response.json()["data"]["users"]["addSshKey"]["message"] is not None
assert response.json()["data"]["users"]["addSshKey"]["success"] is True
assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "tester"
assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [
"ssh-rsa KEY test@pc",
"ssh-rsa KEY test_key@pc",
]
def test_graphql_add_bad_ssh_key(authorized_client, some_users, mock_subprocess_popen):
response = authorized_client.post(
"/graphql",
json={
"query": API_CREATE_SSH_KEY_MUTATION,
"variables": {
"sshInput": {
"username": "user1",
"sshKey": "trust me, this is the ssh key",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["addSshKey"]["code"] == 400
assert response.json()["data"]["users"]["addSshKey"]["message"] is not None
assert response.json()["data"]["users"]["addSshKey"]["success"] is False
def test_graphql_add_ssh_key_nonexistent_user(
@ -234,129 +518,35 @@ def test_graphql_remove_ssh_key_unauthorized(client, some_users, mock_subprocess
assert_empty(response)
@pytest.mark.parametrize("user", key_users)
def test_graphql_remove_ssh_key(authorized_client, no_keys, user):
keys = [
"ssh-rsa KEY test_key@pc",
"ssh-rsa KEY2 test_key@pc",
]
output = api_add_ssh_key(authorized_client, user, keys[0])
output = api_add_ssh_key(authorized_client, user, keys[1])
assert output["user"]["sshKeys"] == keys
output = api_remove_ssh_key(authorized_client, user, keys[1])
assert_ok(output)
assert output["user"]["username"] == user
assert output["user"]["sshKeys"] == [keys[0]]
if user == "root":
assert api_rootkeys(authorized_client) == [keys[0]]
else:
assert api_get_user_keys(authorized_client, user) == [keys[0]]


@pytest.mark.parametrize("user", key_users)
def test_graphql_remove_nonexistent_ssh_key(authorized_client, some_users, user):
output = api_remove_ssh_key(authorized_client, user, "ssh-rsa nonexistent")
assert_errorcode(output, 404)


def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_popen):
response = authorized_client.post(
"/graphql",
json={
"query": API_REMOVE_SSH_KEY_MUTATION,
"variables": {
"sshInput": {
"username": "user1",
"sshKey": "ssh-rsa KEY user1@pc",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200
assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None
assert response.json()["data"]["users"]["removeSshKey"]["success"] is True
assert (
response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "user1"
)
assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == []


def test_graphql_remove_root_ssh_key(
authorized_client, some_users, mock_subprocess_popen
):
response = authorized_client.post(
"/graphql",
json={
"query": API_REMOVE_SSH_KEY_MUTATION,
"variables": {
"sshInput": {
"username": "root",
"sshKey": "ssh-ed25519 KEY test@pc",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200
assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None
assert response.json()["data"]["users"]["removeSshKey"]["success"] is True
assert (
response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "root"
)
assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == []
def test_graphql_remove_main_ssh_key(
authorized_client, some_users, mock_subprocess_popen
):
response = authorized_client.post(
"/graphql",
json={
"query": API_REMOVE_SSH_KEY_MUTATION,
"variables": {
"sshInput": {
"username": "tester",
"sshKey": "ssh-rsa KEY test@pc",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200
assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None
assert response.json()["data"]["users"]["removeSshKey"]["success"] is True
assert (
response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "tester"
)
assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == []
def test_graphql_remove_nonexistent_ssh_key(
authorized_client, some_users, mock_subprocess_popen
):
response = authorized_client.post(
"/graphql",
json={
"query": API_REMOVE_SSH_KEY_MUTATION,
"variables": {
"sshInput": {
"username": "user1",
"sshKey": "ssh-rsa KEY test_key@pc",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["removeSshKey"]["code"] == 404
assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None
assert response.json()["data"]["users"]["removeSshKey"]["success"] is False
def test_graphql_remove_ssh_key_nonexistent_user(
authorized_client, some_users, mock_subprocess_popen
):
output = api_remove_ssh_key(authorized_client, "user666", "ssh-rsa KEY test_key@pc")
assert_errorcode(output, 404)


def test_graphql_remove_ssh_key_nonexistent_user(
authorized_client, some_users, mock_subprocess_popen
):
response = authorized_client.post(
"/graphql",
json={
"query": API_REMOVE_SSH_KEY_MUTATION,
"variables": {
"sshInput": {
"username": "user666",
"sshKey": "ssh-rsa KEY test_key@pc",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["removeSshKey"]["code"] == 404
assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None
assert response.json()["data"]["users"]["removeSshKey"]["success"] is False


@ -6,6 +6,7 @@ import pytest
from tests.common import generate_system_query, read_json
from tests.test_graphql.common import assert_empty
from tests.test_dkim import no_dkim_file, dkim_file
@pytest.fixture
@ -332,6 +333,29 @@ def test_graphql_get_domain(
)
def test_graphql_get_domain_no_dkim(
authorized_client,
domain_file,
mock_get_ip4,
mock_get_ip6,
no_dkim_file,
turned_on,
):
"""Test no DKIM file situation gets properly handled"""
response = authorized_client.post(
"/graphql",
json={
"query": generate_system_query([API_GET_DOMAIN_INFO]),
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
dns_records = response.json()["data"]["system"]["domainInfo"]["requiredDnsRecords"]
for record in dns_records:
if record["name"] == "selector._domainkey":
raise ValueError("unexpected record found:", record)
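The no_dkim_file fixture imported above comes from tests/test_dkim.py, which is not shown in this diff. A plausible sketch of such a fixture, assuming it patches get_dkim_key from selfprivacy_api.utils (the import used by the removed DKIM tests further down) so the resolver behaves as if the selector file is missing; the patch target and body are assumptions, not the committed code:

import pytest


@pytest.fixture
def no_dkim_file_sketch(mocker):
    # Illustrative only: make the DKIM lookup report "no key", so that
    # requiredDnsRecords contains no selector._domainkey entry.
    return mocker.patch(
        "selfprivacy_api.utils.get_dkim_key", autospec=True, return_value=None
    )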
API_GET_TIMEZONE = """
settings {
timezone


@ -3,18 +3,10 @@
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
}, },
"bitwarden": {
"enable": true
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance", "hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS", "resticPassword": "PASS",
"ssh": { "ssh": {
"enable": true, "enable": true,
@ -24,6 +16,7 @@
]
},
"username": "tester",
"modules": {
"gitea": { "gitea": {
"enable": true "enable": true
}, },
@ -36,6 +29,15 @@
"jitsi": { "jitsi": {
"enable": true "enable": true
}, },
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"bitwarden": {
"enable": true
}
},
"autoUpgrade": { "autoUpgrade": {
"enable": true, "enable": true,
"allowReboot": true "allowReboot": true


@ -23,15 +23,6 @@ class ProcessMock:
returncode = 0
class BrokenServiceMock(ProcessMock):
"""Mock subprocess.Popen for broken service"""
def communicate(): # pylint: disable=no-method-argument
return (b"Testing error", None)
returncode = 3
@pytest.fixture
def mock_subprocess_popen(mocker):
mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock)


@ -6,7 +6,13 @@ from tests.common import (
generate_users_query,
read_json,
)
from tests.test_graphql.common import assert_empty
from selfprivacy_api.utils import WriteUserData
from tests.test_graphql.common import (
assert_empty,
assert_errorcode,
assert_ok,
get_data,
)
invalid_usernames = [
"messagebus",
@ -89,6 +95,15 @@ def undefined_settings(mocker, datadir):
return datadir
@pytest.fixture
def no_users_no_admin_nobody(undefined_settings):
datadir = undefined_settings
with WriteUserData() as data:
del data["username"]
del data["sshKeys"]
return datadir
class ProcessMock:
"""Mock subprocess.Popen"""
@ -118,6 +133,17 @@ allUsers {
""" """
def api_all_users(authorized_client):
response = authorized_client.post(
"/graphql",
json={
"query": generate_users_query([API_USERS_INFO]),
},
)
output = get_data(response)["users"]["allUsers"]
return output
def test_graphql_get_users_unauthorized(client, some_users, mock_subprocess_popen):
"""Test wrong auth"""
response = client.post(
@ -170,6 +196,38 @@ def test_graphql_get_no_users(authorized_client, no_users, mock_subprocess_popen
]
def test_graphql_get_users_undefined_but_admin(authorized_client, undefined_settings):
response = authorized_client.post(
"/graphql",
json={
"query": generate_users_query([API_USERS_INFO]),
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert len(response.json()["data"]["users"]["allUsers"]) == 1
assert response.json()["data"]["users"]["allUsers"][0]["username"] == "tester"
assert response.json()["data"]["users"]["allUsers"][0]["sshKeys"] == [
"ssh-rsa KEY test@pc"
]
def test_graphql_get_users_undefined_no_admin(
authorized_client, no_users_no_admin_nobody
):
response = authorized_client.post(
"/graphql",
json={
"query": generate_users_query([API_USERS_INFO]),
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert len(response.json()["data"]["users"]["allUsers"]) == 0
API_GET_USERS = """
query TestUsers($username: String!) {
users {
@ -216,6 +274,23 @@ def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen
]
def test_graphql_get_some_user_undefined(authorized_client, undefined_settings):
response = authorized_client.post(
"/graphql",
json={
"query": API_GET_USERS,
"variables": {
"username": "user1",
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["getUser"] is None
def test_graphql_get_some_user(authorized_client, some_users, mock_subprocess_popen):
response = authorized_client.post(
"/graphql",
@ -309,248 +384,135 @@ mutation createUser($user: UserMutationInput!) {
""" """
def test_graphql_add_user_unauthorize(client, one_user, mock_subprocess_popen):
response = client.post(
"/graphql",
json={
"query": API_CREATE_USERS_MUTATION,
"variables": {
"user": {
"username": "user2",
"password": "12345678",
},
},
},
)
assert_empty(response)


def api_add_user_json(authorized_client, user_json: dict):
# lowlevel for deeper testing of edgecases
return authorized_client.post(
"/graphql",
json={
"query": API_CREATE_USERS_MUTATION,
"variables": {
"user": user_json,
},
},
)


def api_add_user(authorized_client, username, password):
response = api_add_user_json(
authorized_client, {"username": username, "password": password}
)
output = get_data(response)["users"]["createUser"]
return output


def test_graphql_add_user_unauthorized(client, one_user, mock_subprocess_popen):
response = api_add_user_json(client, {"username": "user2", "password": "12345678"})
assert_empty(response)
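assert_ok, assert_errorcode and get_data are imported from tests/test_graphql/common.py, which is not part of this diff. A rough sketch of what such assertion helpers could look like, assuming the code/message/success envelope that the mutations in this file return; the real shared helpers may differ:

def get_data_sketch(response) -> dict:
    # Unwrap the GraphQL "data" field after basic sanity checks.
    assert response.status_code == 200
    data = response.json().get("data")
    assert data is not None
    return data


def assert_ok_sketch(output: dict, code: int = 200) -> None:
    # Successful mutations report the expected code and success=True.
    assert output["code"] == code
    assert output["success"] is True
    assert output["message"] is not None


def assert_errorcode_sketch(output: dict, code: int) -> None:
    # Failed mutations keep the same envelope but flip success to False.
    assert output["code"] == code
    assert output["success"] is False
    assert output["message"] is not None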
def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen):
response = authorized_client.post(
"/graphql",
json={
"query": API_CREATE_USERS_MUTATION,
"variables": {
"user": {
"username": "user2",
"password": "12345678",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["createUser"]["message"] is not None
assert response.json()["data"]["users"]["createUser"]["code"] == 201
assert response.json()["data"]["users"]["createUser"]["success"] is True
assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user2"
assert response.json()["data"]["users"]["createUser"]["user"]["sshKeys"] == []


def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen):
output = api_add_user(authorized_client, "user2", password="12345678")
assert_ok(output, code=201)
assert output["user"]["username"] == "user2"
assert output["user"]["sshKeys"] == []
def test_graphql_add_undefined_settings(
authorized_client, undefined_settings, mock_subprocess_popen
):
response = authorized_client.post(
"/graphql",
json={
"query": API_CREATE_USERS_MUTATION,
"variables": {
"user": {
"username": "user2",
"password": "12345678",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["createUser"]["message"] is not None
assert response.json()["data"]["users"]["createUser"]["code"] == 201
assert response.json()["data"]["users"]["createUser"]["success"] is True
assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user2"
assert response.json()["data"]["users"]["createUser"]["user"]["sshKeys"] == []


def test_graphql_add_user_when_undefined_settings(
authorized_client, undefined_settings, mock_subprocess_popen
):
output = api_add_user(authorized_client, "user2", password="12345678")
assert_ok(output, code=201)
assert output["user"]["username"] == "user2"
assert output["user"]["sshKeys"] == []
def test_graphql_add_without_password(
authorized_client, one_user, mock_subprocess_popen
):
response = authorized_client.post(
"/graphql",
json={
"query": API_CREATE_USERS_MUTATION,
"variables": {
"user": {
"username": "user2",
"password": "",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["createUser"]["message"] is not None
assert response.json()["data"]["users"]["createUser"]["code"] == 400
assert response.json()["data"]["users"]["createUser"]["success"] is False
assert response.json()["data"]["users"]["createUser"]["user"] is None


def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_popen):
response = authorized_client.post(
"/graphql",
json={
"query": API_CREATE_USERS_MUTATION,
"variables": {
"user": {
"username": "",
"password": "",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["createUser"]["message"] is not None
assert response.json()["data"]["users"]["createUser"]["code"] == 400
assert response.json()["data"]["users"]["createUser"]["success"] is False
assert response.json()["data"]["users"]["createUser"]["user"] is None


users_witn_empty_fields = [
{"username": "user2", "password": ""},
{"username": "", "password": "12345678"},
{"username": "", "password": ""},
]


@pytest.mark.parametrize("user_json", users_witn_empty_fields)
def test_graphql_add_with_empty_fields(authorized_client, one_user, user_json):
response = api_add_user_json(authorized_client, user_json)
output = get_data(response)["users"]["createUser"]
assert_errorcode(output, 400)
assert output["user"] is None


users_witn_undefined_fields = [
{"username": "user2"},
{"password": "12345678"},
{},
]
@pytest.mark.parametrize("user_json", users_witn_undefined_fields)
def test_graphql_add_with_undefined_fields(authorized_client, one_user, user_json):
# checking that all fields are mandatory
response = api_add_user_json(authorized_client, user_json)
assert response.json()["errors"] is not None
assert response.json()["errors"] != []
@pytest.mark.parametrize("username", invalid_usernames) @pytest.mark.parametrize("username", invalid_usernames)
def test_graphql_add_system_username( def test_graphql_add_system_username(
authorized_client, one_user, mock_subprocess_popen, username authorized_client, one_user, mock_subprocess_popen, username
): ):
response = authorized_client.post( output = api_add_user(authorized_client, username, password="12345678")
"/graphql",
json={
"query": API_CREATE_USERS_MUTATION,
"variables": {
"user": {
"username": username,
"password": "12345678",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["createUser"]["message"] is not None assert_errorcode(output, code=409)
assert response.json()["data"]["users"]["createUser"]["code"] == 409 assert output["user"] is None
assert response.json()["data"]["users"]["createUser"]["success"] is False
assert response.json()["data"]["users"]["createUser"]["user"] is None
def test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_popen):
response = authorized_client.post(
"/graphql",
json={
"query": API_CREATE_USERS_MUTATION,
"variables": {
"user": {
"username": "user1",
"password": "12345678",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["createUser"]["message"] is not None
assert response.json()["data"]["users"]["createUser"]["code"] == 409
assert response.json()["data"]["users"]["createUser"]["success"] is False
assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user1"
assert (
response.json()["data"]["users"]["createUser"]["user"]["sshKeys"][0]
== "ssh-rsa KEY user1@pc"
)


def test_graphql_add_existing_user(authorized_client, one_user):
output = api_add_user(authorized_client, "user1", password="12345678")
assert_errorcode(output, code=409)
assert output["user"]["username"] == "user1"
assert output["user"]["sshKeys"][0] == "ssh-rsa KEY user1@pc"
def test_graphql_add_main_user(authorized_client, one_user, mock_subprocess_popen):
response = authorized_client.post(
"/graphql",
json={
"query": API_CREATE_USERS_MUTATION,
"variables": {
"user": {
"username": "tester",
"password": "12345678",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["createUser"]["message"] is not None
assert response.json()["data"]["users"]["createUser"]["code"] == 409
assert response.json()["data"]["users"]["createUser"]["success"] is False
assert (
response.json()["data"]["users"]["createUser"]["user"]["username"] == "tester"
)
assert (
response.json()["data"]["users"]["createUser"]["user"]["sshKeys"][0]
== "ssh-rsa KEY test@pc"
)


def test_graphql_add_main_user(authorized_client, one_user):
output = api_add_user(authorized_client, "tester", password="12345678")
assert_errorcode(output, code=409)
assert output["user"]["username"] == "tester"
assert output["user"]["sshKeys"][0] == "ssh-rsa KEY test@pc"


def test_graphql_add_user_when_no_admin_defined(
authorized_client, no_users_no_admin_nobody
):
output = api_add_user(authorized_client, "tester", password="12345678")
assert_errorcode(output, code=400)
assert output["user"] is None
def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_popen):
response = authorized_client.post(
"/graphql",
json={
"query": API_CREATE_USERS_MUTATION,
"variables": {
"user": {
"username": "a" * 32,
"password": "12345678",
},
},
},
)
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["createUser"]["message"] is not None
assert response.json()["data"]["users"]["createUser"]["code"] == 400
assert response.json()["data"]["users"]["createUser"]["success"] is False
assert response.json()["data"]["users"]["createUser"]["user"] is None


@pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "^-^"])
def test_graphql_add_invalid_username(
authorized_client, one_user, mock_subprocess_popen, username
):
response = authorized_client.post(
"/graphql",
json={
"query": API_CREATE_USERS_MUTATION,
"variables": {
"user": {
"username": username,
"password": "12345678",
},
},
},
)
assert response.status_code == 200
assert response.json().get("data") is not None
assert response.json()["data"]["users"]["createUser"]["message"] is not None
assert response.json()["data"]["users"]["createUser"]["code"] == 400
assert response.json()["data"]["users"]["createUser"]["success"] is False
assert response.json()["data"]["users"]["createUser"]["user"] is None


def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_popen):
output = api_add_user(authorized_client, "a" * 32, password="12345678")
assert_errorcode(output, code=400)
assert output["user"] is None


# TODO: maybe make a username generating function to make a more comprehensive invalid username test
@pytest.mark.parametrize(
"username", ["", "1", "фыр", "user1@", "^-^", "№:%##$^&@$&^()_"]
)
def test_graphql_add_invalid_username(
authorized_client, one_user, mock_subprocess_popen, username
):
output = api_add_user(authorized_client, username, password="12345678")
assert_errorcode(output, code=400)
assert output["user"] is None
API_DELETE_USER_MUTATION = """
@ -592,6 +554,11 @@ def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_pope
assert response.json()["data"]["users"]["deleteUser"]["message"] is not None assert response.json()["data"]["users"]["deleteUser"]["message"] is not None
assert response.json()["data"]["users"]["deleteUser"]["success"] is True assert response.json()["data"]["users"]["deleteUser"]["success"] is True
new_users = api_all_users(authorized_client)
assert len(new_users) == 3
usernames = [user["username"] for user in new_users]
assert set(usernames) == set(["user2", "user3", "tester"])
@pytest.mark.parametrize("username", ["", "def"]) @pytest.mark.parametrize("username", ["", "def"])
def test_graphql_delete_nonexistent_users( def test_graphql_delete_nonexistent_users(

tests/test_migrations.py (new file)

@ -0,0 +1,60 @@
import pytest
from selfprivacy_api.migrations.modules_in_json import CreateModulesField
from selfprivacy_api.utils import ReadUserData, WriteUserData
from selfprivacy_api.services import get_all_services
@pytest.fixture()
def stray_services(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "strays.json")
return datadir
@pytest.fixture()
def empty_json(generic_userdata):
with WriteUserData() as data:
data.clear()
with ReadUserData() as data:
assert len(data.keys()) == 0
return
def test_modules_empty_json(empty_json):
with ReadUserData() as data:
assert "modules" not in data.keys()
assert CreateModulesField().is_migration_needed()
CreateModulesField().migrate()
assert not CreateModulesField().is_migration_needed()
with ReadUserData() as data:
assert "modules" in data.keys()
@pytest.mark.parametrize("modules_field", [True, False])
def test_modules_stray_services(modules_field, stray_services):
if not modules_field:
with WriteUserData() as data:
del data["modules"]
assert CreateModulesField().is_migration_needed()
CreateModulesField().migrate()
for service in get_all_services():
# assumes we do not tolerate previous format
assert service.is_enabled()
if service.get_id() == "email":
continue
with ReadUserData() as data:
assert service.get_id() in data["modules"].keys()
assert service.get_id() not in data.keys()
assert not CreateModulesField().is_migration_needed()
def test_modules_no_migration_on_generic_data(generic_userdata):
assert not CreateModulesField().is_migration_needed()
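These tests drive CreateModulesField only through is_migration_needed() and migrate(); the migration itself (selfprivacy_api/migrations/modules_in_json.py) is not part of this hunk. As a rough illustration of the behaviour the tests expect, a hypothetical migration could look like this, assuming services expose get_id() as used above; this is a sketch, not the committed implementation:

from selfprivacy_api.services import get_all_services
from selfprivacy_api.utils import ReadUserData, WriteUserData


class CreateModulesFieldSketch:
    """Illustrative only: move legacy top-level service blocks under 'modules'."""

    def is_migration_needed(self) -> bool:
        with ReadUserData() as data:
            if "modules" not in data:
                return True
            # Also migrate while legacy top-level service blocks are still around.
            return any(service.get_id() in data for service in get_all_services())

    def migrate(self) -> None:
        with WriteUserData() as data:
            modules = data.get("modules", {})
            for service in get_all_services():
                service_id = service.get_id()
                if service_id in data:
                    # Relocate e.g. data["bitwarden"] to data["modules"]["bitwarden"].
                    modules[service_id] = data.pop(service_id)
            data["modules"] = modules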


@ -0,0 +1,23 @@
{
"bitwarden": {
"enable": true
},
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"gitea": {
"enable": true
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"jitsi": {
"enable": true
},
"modules": {}
}


@ -0,0 +1,9 @@
{
"tokens": [
{
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
"name": "primary_token",
"date": "2022-07-15 17:41:31.675698"
}
]
}


@ -0,0 +1,9 @@
{
"tokens": [
{
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
"name": "primary_token",
"date": "2022-07-15 17:41:31.675698"
}
]
}


@ -1 +0,0 @@
{}


@ -1,125 +0,0 @@
import json
import pytest
def read_json(file_path):
with open(file_path, "r") as f:
return json.load(f)
###############################################################################
@pytest.fixture
def bitwarden_off(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json")
assert read_json(datadir / "turned_off.json")["bitwarden"]["enable"] == False
return datadir
@pytest.fixture
def bitwarden_on(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json")
assert read_json(datadir / "turned_on.json")["bitwarden"]["enable"] == True
return datadir
@pytest.fixture
def bitwarden_enable_undefined(mocker, datadir):
mocker.patch(
"selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json"
)
assert "enable" not in read_json(datadir / "enable_undefined.json")["bitwarden"]
return datadir
@pytest.fixture
def bitwarden_undefined(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json")
assert "bitwarden" not in read_json(datadir / "undefined.json")
return datadir
###############################################################################
@pytest.mark.parametrize("endpoint", ["enable", "disable"])
def test_unauthorized(client, bitwarden_off, endpoint):
response = client.post(f"/services/bitwarden/{endpoint}")
assert response.status_code == 401
@pytest.mark.parametrize("endpoint", ["enable", "disable"])
def test_illegal_methods(authorized_client, bitwarden_off, endpoint):
response = authorized_client.get(f"/services/bitwarden/{endpoint}")
assert response.status_code == 405
response = authorized_client.put(f"/services/bitwarden/{endpoint}")
assert response.status_code == 405
response = authorized_client.delete(f"/services/bitwarden/{endpoint}")
assert response.status_code == 405
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_from_off(authorized_client, bitwarden_off, endpoint, target_file):
response = authorized_client.post(f"/services/bitwarden/{endpoint}")
assert response.status_code == 200
assert read_json(bitwarden_off / "turned_off.json") == read_json(
bitwarden_off / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_from_on(authorized_client, bitwarden_on, endpoint, target_file):
response = authorized_client.post(f"/services/bitwarden/{endpoint}")
assert response.status_code == 200
assert read_json(bitwarden_on / "turned_on.json") == read_json(
bitwarden_on / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_twice(authorized_client, bitwarden_off, endpoint, target_file):
response = authorized_client.post(f"/services/bitwarden/{endpoint}")
assert response.status_code == 200
response = authorized_client.post(f"/services/bitwarden/{endpoint}")
assert response.status_code == 200
assert read_json(bitwarden_off / "turned_off.json") == read_json(
bitwarden_off / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_on_attribute_deleted(
authorized_client, bitwarden_enable_undefined, endpoint, target_file
):
response = authorized_client.post(f"/services/bitwarden/{endpoint}")
assert response.status_code == 200
assert read_json(bitwarden_enable_undefined / "enable_undefined.json") == read_json(
bitwarden_enable_undefined / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_on_bitwarden_undefined(
authorized_client, bitwarden_undefined, endpoint, target_file
):
response = authorized_client.post(f"/services/bitwarden/{endpoint}")
assert response.status_code == 200
assert read_json(bitwarden_undefined / "undefined.json") == read_json(
bitwarden_undefined / target_file
)


@ -1,56 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,57 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,57 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": true
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,54 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,121 +0,0 @@
import json
import pytest
def read_json(file_path):
with open(file_path, "r") as f:
return json.load(f)
###############################################################################
@pytest.fixture
def gitea_off(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json")
assert read_json(datadir / "turned_off.json")["gitea"]["enable"] == False
return datadir
@pytest.fixture
def gitea_on(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json")
assert read_json(datadir / "turned_on.json")["gitea"]["enable"] == True
return datadir
@pytest.fixture
def gitea_enable_undefined(mocker, datadir):
mocker.patch(
"selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json"
)
assert "enable" not in read_json(datadir / "enable_undefined.json")["gitea"]
return datadir
@pytest.fixture
def gitea_undefined(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json")
assert "gitea" not in read_json(datadir / "undefined.json")
return datadir
###############################################################################
@pytest.mark.parametrize("endpoint", ["enable", "disable"])
def test_unauthorized(client, gitea_off, endpoint):
response = client.post(f"/services/gitea/{endpoint}")
assert response.status_code == 401
@pytest.mark.parametrize("endpoint", ["enable", "disable"])
def test_illegal_methods(authorized_client, gitea_off, endpoint):
response = authorized_client.get(f"/services/gitea/{endpoint}")
assert response.status_code == 405
response = authorized_client.put(f"/services/gitea/{endpoint}")
assert response.status_code == 405
response = authorized_client.delete(f"/services/gitea/{endpoint}")
assert response.status_code == 405
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_from_off(authorized_client, gitea_off, endpoint, target_file):
response = authorized_client.post(f"/services/gitea/{endpoint}")
assert response.status_code == 200
assert read_json(gitea_off / "turned_off.json") == read_json(
gitea_off / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_from_on(authorized_client, gitea_on, endpoint, target_file):
response = authorized_client.post(f"/services/gitea/{endpoint}")
assert response.status_code == 200
assert read_json(gitea_on / "turned_on.json") == read_json(gitea_on / target_file)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_twice(authorized_client, gitea_off, endpoint, target_file):
response = authorized_client.post(f"/services/gitea/{endpoint}")
assert response.status_code == 200
response = authorized_client.post(f"/services/gitea/{endpoint}")
assert response.status_code == 200
assert read_json(gitea_off / "turned_off.json") == read_json(
gitea_off / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_on_attribute_deleted(
authorized_client, gitea_enable_undefined, endpoint, target_file
):
response = authorized_client.post(f"/services/gitea/{endpoint}")
assert response.status_code == 200
assert read_json(gitea_enable_undefined / "enable_undefined.json") == read_json(
gitea_enable_undefined / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_on_gitea_undefined(authorized_client, gitea_undefined, endpoint, target_file):
response = authorized_client.post(f"/services/gitea/{endpoint}")
assert response.status_code == 200
assert read_json(gitea_undefined / "undefined.json") == read_json(
gitea_undefined / target_file
)


@ -1,56 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,57 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,57 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": true
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,54 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,102 +0,0 @@
import base64
import json
import pytest
from selfprivacy_api.utils import get_dkim_key
###############################################################################
class ProcessMock:
"""Mock subprocess.Popen"""
def __init__(self, args, **kwargs):
self.args = args
self.kwargs = kwargs
def communicate():
return (
b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector for example.com\n',
None,
)
class NoFileMock(ProcessMock):
def communicate():
return (b"", None)
@pytest.fixture
def mock_subproccess_popen(mocker):
mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock)
mocker.patch(
"selfprivacy_api.rest.services.get_domain",
autospec=True,
return_value="example.com",
)
mocker.patch("os.path.exists", autospec=True, return_value=True)
return mock
@pytest.fixture
def mock_no_file(mocker):
mock = mocker.patch("subprocess.Popen", autospec=True, return_value=NoFileMock)
mocker.patch(
"selfprivacy_api.rest.services.get_domain",
autospec=True,
return_value="example.com",
)
mocker.patch("os.path.exists", autospec=True, return_value=False)
return mock
###############################################################################
def test_unauthorized(client, mock_subproccess_popen):
"""Test unauthorized"""
response = client.get("/services/mailserver/dkim")
assert response.status_code == 401
def test_illegal_methods(authorized_client, mock_subproccess_popen):
response = authorized_client.post("/services/mailserver/dkim")
assert response.status_code == 405
response = authorized_client.put("/services/mailserver/dkim")
assert response.status_code == 405
response = authorized_client.delete("/services/mailserver/dkim")
assert response.status_code == 405
def test_get_dkim_key(mock_subproccess_popen):
"""Test DKIM key"""
dkim_key = get_dkim_key("example.com")
assert (
dkim_key
== "v=DKIM1; k=rsa; p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB"
)
assert mock_subproccess_popen.call_args[0][0] == [
"cat",
"/var/dkim/example.com.selector.txt",
]
def test_dkim_key(authorized_client, mock_subproccess_popen):
"""Test old REST DKIM key endpoint"""
response = authorized_client.get("/services/mailserver/dkim")
assert response.status_code == 200
assert (
base64.b64decode(response.text)
== b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector for example.com\n'
)
assert mock_subproccess_popen.call_args[0][0] == [
"cat",
"/var/dkim/example.com.selector.txt",
]
def test_no_dkim_key(authorized_client, mock_no_file):
"""Test no DKIM key"""
response = authorized_client.get("/services/mailserver/dkim")
assert response.status_code == 404
assert mock_no_file.called == False


@ -1,123 +0,0 @@
import json
import pytest
def read_json(file_path):
with open(file_path, "r") as f:
return json.load(f)
###############################################################################
@pytest.fixture
def nextcloud_off(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json")
assert read_json(datadir / "turned_off.json")["nextcloud"]["enable"] == False
return datadir
@pytest.fixture
def nextcloud_on(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json")
assert read_json(datadir / "turned_on.json")["nextcloud"]["enable"] == True
return datadir
@pytest.fixture
def nextcloud_enable_undefined(mocker, datadir):
mocker.patch(
"selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json"
)
assert "enable" not in read_json(datadir / "enable_undefined.json")["nextcloud"]
return datadir
@pytest.fixture
def nextcloud_undefined(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json")
assert "nextcloud" not in read_json(datadir / "undefined.json")
return datadir
###############################################################################
@pytest.mark.parametrize("endpoint", ["enable", "disable"])
def test_unauthorized(client, nextcloud_off, endpoint):
response = client.post(f"/services/nextcloud/{endpoint}")
assert response.status_code == 401
@pytest.mark.parametrize("endpoint", ["enable", "disable"])
def test_illegal_methods(authorized_client, nextcloud_off, endpoint):
response = authorized_client.get(f"/services/nextcloud/{endpoint}")
assert response.status_code == 405
response = authorized_client.put(f"/services/nextcloud/{endpoint}")
assert response.status_code == 405
response = authorized_client.delete(f"/services/nextcloud/{endpoint}")
assert response.status_code == 405
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_from_off(authorized_client, nextcloud_off, endpoint, target_file):
response = authorized_client.post(f"/services/nextcloud/{endpoint}")
assert response.status_code == 200
assert read_json(nextcloud_off / "turned_off.json") == read_json(
nextcloud_off / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_from_on(authorized_client, nextcloud_on, endpoint, target_file):
response = authorized_client.post(f"/services/nextcloud/{endpoint}")
assert response.status_code == 200
assert read_json(nextcloud_on / "turned_on.json") == read_json(
nextcloud_on / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_twice(authorized_client, nextcloud_off, endpoint, target_file):
response = authorized_client.post(f"/services/nextcloud/{endpoint}")
assert response.status_code == 200
response = authorized_client.post(f"/services/nextcloud/{endpoint}")
assert response.status_code == 200
assert read_json(nextcloud_off / "turned_off.json") == read_json(
nextcloud_off / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_on_attribute_deleted(
authorized_client, nextcloud_enable_undefined, endpoint, target_file
):
response = authorized_client.post(f"/services/nextcloud/{endpoint}")
assert response.status_code == 200
assert read_json(nextcloud_enable_undefined / "enable_undefined.json") == read_json(
nextcloud_enable_undefined / target_file
)
@pytest.mark.parametrize("endpoint,target", [("enable", True), ("disable", False)])
def test_on_nextcloud_undefined(
authorized_client, nextcloud_undefined, endpoint, target
):
response = authorized_client.post(f"/services/nextcloud/{endpoint}")
assert response.status_code == 200
assert (
read_json(nextcloud_undefined / "undefined.json")["nextcloud"]["enable"]
== target
)


@ -1,56 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN"
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,57 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": false
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,57 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,49 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,123 +0,0 @@
import json
import pytest
def read_json(file_path):
with open(file_path, "r") as f:
return json.load(f)
###############################################################################
@pytest.fixture
def ocserv_off(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json")
assert read_json(datadir / "turned_off.json")["ocserv"]["enable"] == False
return datadir
@pytest.fixture
def ocserv_on(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json")
assert read_json(datadir / "turned_on.json")["ocserv"]["enable"] == True
return datadir
@pytest.fixture
def ocserv_enable_undefined(mocker, datadir):
mocker.patch(
"selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json"
)
assert "enable" not in read_json(datadir / "enable_undefined.json")["ocserv"]
return datadir
@pytest.fixture
def ocserv_undefined(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json")
assert "ocserv" not in read_json(datadir / "undefined.json")
return datadir
###############################################################################
@pytest.mark.parametrize("endpoint", ["enable", "disable"])
def test_unauthorized(client, ocserv_off, endpoint):
response = client.post(f"/services/ocserv/{endpoint}")
assert response.status_code == 401
@pytest.mark.parametrize("endpoint", ["enable", "disable"])
def test_illegal_methods(authorized_client, ocserv_off, endpoint):
response = authorized_client.get(f"/services/ocserv/{endpoint}")
assert response.status_code == 405
response = authorized_client.put(f"/services/ocserv/{endpoint}")
assert response.status_code == 405
response = authorized_client.delete(f"/services/ocserv/{endpoint}")
assert response.status_code == 405
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_from_off(authorized_client, ocserv_off, endpoint, target_file):
response = authorized_client.post(f"/services/ocserv/{endpoint}")
assert response.status_code == 200
assert read_json(ocserv_off / "turned_off.json") == read_json(
ocserv_off / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_from_on(authorized_client, ocserv_on, endpoint, target_file):
response = authorized_client.post(f"/services/ocserv/{endpoint}")
assert response.status_code == 200
assert read_json(ocserv_on / "turned_on.json") == read_json(ocserv_on / target_file)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_twice(authorized_client, ocserv_off, endpoint, target_file):
response = authorized_client.post(f"/services/ocserv/{endpoint}")
assert response.status_code == 200
response = authorized_client.post(f"/services/ocserv/{endpoint}")
assert response.status_code == 200
assert read_json(ocserv_off / "turned_off.json") == read_json(
ocserv_off / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_on_attribute_deleted(
authorized_client, ocserv_enable_undefined, endpoint, target_file
):
response = authorized_client.post(f"/services/ocserv/{endpoint}")
assert response.status_code == 200
assert read_json(ocserv_enable_undefined / "enable_undefined.json") == read_json(
ocserv_enable_undefined / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_on_ocserv_undefined(
authorized_client, ocserv_undefined, endpoint, target_file
):
response = authorized_client.post(f"/services/ocserv/{endpoint}")
assert response.status_code == 200
assert read_json(ocserv_undefined / "undefined.json") == read_json(
ocserv_undefined / target_file
)


@ -1,56 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": false
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,57 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": false
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": false
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}


@ -1,57 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": false
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,54 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": false
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,125 +0,0 @@
import json
import pytest
def read_json(file_path):
with open(file_path, "r") as f:
return json.load(f)
###############################################################################
@pytest.fixture
def pleroma_off(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json")
assert read_json(datadir / "turned_off.json")["pleroma"]["enable"] == False
return datadir
@pytest.fixture
def pleroma_on(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json")
assert read_json(datadir / "turned_on.json")["pleroma"]["enable"] == True
return datadir
@pytest.fixture
def pleroma_enable_undefined(mocker, datadir):
mocker.patch(
"selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json"
)
assert "enable" not in read_json(datadir / "enable_undefined.json")["pleroma"]
return datadir
@pytest.fixture
def pleroma_undefined(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json")
assert "pleroma" not in read_json(datadir / "undefined.json")
return datadir
###############################################################################
@pytest.mark.parametrize("endpoint", ["enable", "disable"])
def test_unauthorized(client, pleroma_off, endpoint):
response = client.post(f"/services/pleroma/{endpoint}")
assert response.status_code == 401
@pytest.mark.parametrize("endpoint", ["enable", "disable"])
def test_illegal_methods(authorized_client, pleroma_off, endpoint):
response = authorized_client.get(f"/services/pleroma/{endpoint}")
assert response.status_code == 405
response = authorized_client.put(f"/services/pleroma/{endpoint}")
assert response.status_code == 405
response = authorized_client.delete(f"/services/pleroma/{endpoint}")
assert response.status_code == 405
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_from_off(authorized_client, pleroma_off, endpoint, target_file):
response = authorized_client.post(f"/services/pleroma/{endpoint}")
assert response.status_code == 200
assert read_json(pleroma_off / "turned_off.json") == read_json(
pleroma_off / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_from_on(authorized_client, pleroma_on, endpoint, target_file):
response = authorized_client.post(f"/services/pleroma/{endpoint}")
assert response.status_code == 200
assert read_json(pleroma_on / "turned_on.json") == read_json(
pleroma_on / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_switch_twice(authorized_client, pleroma_off, endpoint, target_file):
response = authorized_client.post(f"/services/pleroma/{endpoint}")
assert response.status_code == 200
response = authorized_client.post(f"/services/pleroma/{endpoint}")
assert response.status_code == 200
assert read_json(pleroma_off / "turned_off.json") == read_json(
pleroma_off / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_on_attribute_deleted(
authorized_client, pleroma_enable_undefined, endpoint, target_file
):
response = authorized_client.post(f"/services/pleroma/{endpoint}")
assert response.status_code == 200
assert read_json(pleroma_enable_undefined / "enable_undefined.json") == read_json(
pleroma_enable_undefined / target_file
)
@pytest.mark.parametrize(
"endpoint,target_file",
[("enable", "turned_on.json"), ("disable", "turned_off.json")],
)
def test_on_pleroma_undefined(
authorized_client, pleroma_undefined, endpoint, target_file
):
response = authorized_client.post(f"/services/pleroma/{endpoint}")
assert response.status_code == 200
assert read_json(pleroma_undefined / "undefined.json") == read_json(
pleroma_undefined / target_file
)

View file

@ -1,56 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": false
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": false
},
"pleroma": {
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,57 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": false
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": false
},
"pleroma": {
"enable": false
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,57 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": false
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": false
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,54 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": false
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": false
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,72 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"users": [
{
"username": "user1",
"hashedPassword": "HASHED_PASSWORD_1",
"sshKeys": [
"ssh-rsa KEY user1@pc"
]
},
{
"username": "user2",
"hashedPassword": "HASHED_PASSWORD_2",
"sshKeys": [
]
},
{
"username": "user3",
"hashedPassword": "HASHED_PASSWORD_3"
}
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
}
}

View file

@ -1,76 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"users": [
{
"username": "user1",
"hashedPassword": "HASHED_PASSWORD_1",
"sshKeys": [
"ssh-rsa KEY user1@pc"
]
},
{
"username": "user2",
"hashedPassword": "HASHED_PASSWORD_2",
"sshKeys": [
]
},
{
"username": "user3",
"hashedPassword": "HASHED_PASSWORD_3"
}
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "BUCKET"
}
}

View file

@ -1,70 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"users": [
{
"username": "user1",
"hashedPassword": "HASHED_PASSWORD_1",
"sshKeys": [
"ssh-rsa KEY user1@pc"
]
},
{
"username": "user2",
"hashedPassword": "HASHED_PASSWORD_2",
"sshKeys": [
]
},
{
"username": "user3",
"hashedPassword": "HASHED_PASSWORD_3"
}
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
}
}

View file

@ -1,138 +0,0 @@
import base64
import json
import pytest
def read_json(file_path):
with open(file_path, "r", encoding="utf-8") as file:
return json.load(file)
def call_args_asserts(mocked_object):
assert mocked_object.call_count == 7
assert mocked_object.call_args_list[0][0][0] == [
"systemctl",
"show",
"dovecot2.service",
]
assert mocked_object.call_args_list[1][0][0] == [
"systemctl",
"show",
"postfix.service",
]
assert mocked_object.call_args_list[2][0][0] == [
"systemctl",
"show",
"vaultwarden.service",
]
assert mocked_object.call_args_list[3][0][0] == [
"systemctl",
"show",
"gitea.service",
]
assert mocked_object.call_args_list[4][0][0] == [
"systemctl",
"show",
"phpfpm-nextcloud.service",
]
assert mocked_object.call_args_list[5][0][0] == [
"systemctl",
"show",
"ocserv.service",
]
assert mocked_object.call_args_list[6][0][0] == [
"systemctl",
"show",
"pleroma.service",
]
SUCCESSFUL_STATUS = b"""
Type=oneshot
ExitType=main
Restart=no
NotifyAccess=none
RestartUSec=100ms
LoadState=loaded
ActiveState=active
FreezerState=running
SubState=exited
"""
FAILED_STATUS = b"""
Type=oneshot
ExitType=main
Restart=no
NotifyAccess=none
RestartUSec=100ms
LoadState=loaded
ActiveState=failed
FreezerState=running
SubState=exited
"""
@pytest.fixture
def mock_subproccess_popen(mocker):
mock = mocker.patch(
"subprocess.check_output", autospec=True, return_value=SUCCESSFUL_STATUS
)
return mock
@pytest.fixture
def mock_broken_service(mocker):
mock = mocker.patch(
"subprocess.check_output", autospec=True, return_value=FAILED_STATUS
)
return mock
###############################################################################
def test_unauthorized(client, mock_subproccess_popen):
"""Test unauthorized"""
response = client.get("/services/status")
assert response.status_code == 401
def test_illegal_methods(authorized_client, mock_subproccess_popen):
response = authorized_client.post("/services/status")
assert response.status_code == 405
response = authorized_client.put("/services/status")
assert response.status_code == 405
response = authorized_client.delete("/services/status")
assert response.status_code == 405
def test_dkim_key(authorized_client, mock_subproccess_popen):
response = authorized_client.get("/services/status")
assert response.status_code == 200
assert response.json() == {
"imap": 0,
"smtp": 0,
"http": 0,
"bitwarden": 0,
"gitea": 0,
"nextcloud": 0,
"ocserv": 0,
"pleroma": 0,
}
call_args_asserts(mock_subproccess_popen)
def test_no_dkim_key(authorized_client, mock_broken_service):
response = authorized_client.get("/services/status")
assert response.status_code == 200
assert response.json() == {
"imap": 1,
"smtp": 1,
"http": 0,
"bitwarden": 1,
"gitea": 1,
"nextcloud": 1,
"ocserv": 1,
"pleroma": 1,
}
call_args_asserts(mock_broken_service)

View file

@ -1,521 +0,0 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
import json
import pytest
def read_json(file_path):
with open(file_path, "r", encoding="utf-8") as file:
return json.load(file)
## FIXTURES ###################################################
@pytest.fixture
def ssh_off(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json")
assert not read_json(datadir / "turned_off.json")["ssh"]["enable"]
assert read_json(datadir / "turned_off.json")["ssh"]["passwordAuthentication"]
return datadir
@pytest.fixture
def ssh_on(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json")
assert read_json(datadir / "turned_off.json")["ssh"]["passwordAuthentication"]
assert read_json(datadir / "turned_on.json")["ssh"]["enable"]
return datadir
@pytest.fixture
def all_off(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "all_off.json")
assert not read_json(datadir / "all_off.json")["ssh"]["passwordAuthentication"]
assert not read_json(datadir / "all_off.json")["ssh"]["enable"]
return datadir
@pytest.fixture
def undefined_settings(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json")
assert "ssh" not in read_json(datadir / "undefined.json")
return datadir
@pytest.fixture
def undefined_values(mocker, datadir):
mocker.patch(
"selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined_values.json"
)
assert "ssh" in read_json(datadir / "undefined_values.json")
assert "enable" not in read_json(datadir / "undefined_values.json")["ssh"]
assert (
"passwordAuthentication"
not in read_json(datadir / "undefined_values.json")["ssh"]
)
return datadir
@pytest.fixture
def root_and_admin_have_keys(mocker, datadir):
mocker.patch(
"selfprivacy_api.utils.USERDATA_FILE",
new=datadir / "root_and_admin_have_keys.json",
)
assert read_json(datadir / "root_and_admin_have_keys.json")["ssh"]["enable"]
assert read_json(datadir / "root_and_admin_have_keys.json")["ssh"][
"passwordAuthentication"
]
assert read_json(datadir / "root_and_admin_have_keys.json")["ssh"]["rootKeys"] == [
"ssh-ed25519 KEY test@pc"
]
assert read_json(datadir / "root_and_admin_have_keys.json")["sshKeys"] == [
"ssh-rsa KEY test@pc"
]
return datadir
@pytest.fixture
def some_users(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "some_users.json")
assert "users" in read_json(datadir / "some_users.json")
assert read_json(datadir / "some_users.json")["users"] == [
{
"username": "user1",
"hashedPassword": "HASHED_PASSWORD_1",
"sshKeys": ["ssh-rsa KEY user1@pc"],
},
{"username": "user2", "hashedPassword": "HASHED_PASSWORD_2", "sshKeys": []},
{"username": "user3", "hashedPassword": "HASHED_PASSWORD_3"},
]
return datadir
## TEST 401 ######################################################
@pytest.mark.parametrize("endpoint", ["ssh/enable", "ssh/keys/user"])
def test_unauthorized(client, ssh_off, endpoint):
response = client.post(f"/services/{endpoint}")
assert response.status_code == 401
@pytest.mark.parametrize("endpoint", ["ssh", "ssh/key/send"])
def test_unauthorized_put(client, ssh_off, endpoint):
response = client.put(f"/services/{endpoint}")
assert response.status_code == 401
## TEST ENABLE ######################################################
def test_legacy_enable(authorized_client, ssh_off):
response = authorized_client.post("/services/ssh/enable")
assert response.status_code == 200
assert read_json(ssh_off / "turned_off.json") == read_json(
ssh_off / "turned_on.json"
)
def test_legacy_on_undefined(authorized_client, undefined_settings):
response = authorized_client.post("/services/ssh/enable")
assert response.status_code == 200
data = read_json(undefined_settings / "undefined.json")
assert data["ssh"]["enable"] == True
def test_legacy_enable_when_enabled(authorized_client, ssh_on):
response = authorized_client.post("/services/ssh/enable")
assert response.status_code == 200
assert read_json(ssh_on / "turned_on.json") == read_json(ssh_on / "turned_on.json")
## GET ON /ssh ######################################################
def test_get_current_settings_ssh_off(authorized_client, ssh_off):
response = authorized_client.get("/services/ssh")
assert response.status_code == 200
assert response.json() == {"enable": False, "passwordAuthentication": True}
def test_get_current_settings_ssh_on(authorized_client, ssh_on):
response = authorized_client.get("/services/ssh")
assert response.status_code == 200
assert response.json() == {"enable": True, "passwordAuthentication": True}
def test_get_current_settings_all_off(authorized_client, all_off):
response = authorized_client.get("/services/ssh")
assert response.status_code == 200
assert response.json() == {"enable": False, "passwordAuthentication": False}
def test_get_current_settings_undefined(authorized_client, undefined_settings):
response = authorized_client.get("/services/ssh")
assert response.status_code == 200
assert response.json() == {"enable": True, "passwordAuthentication": True}
def test_get_current_settings_mostly_undefined(authorized_client, undefined_values):
response = authorized_client.get("/services/ssh")
assert response.status_code == 200
assert response.json() == {"enable": True, "passwordAuthentication": True}
## PUT ON /ssh ######################################################
available_settings = [
{"enable": True, "passwordAuthentication": True},
{"enable": True, "passwordAuthentication": False},
{"enable": False, "passwordAuthentication": True},
{"enable": False, "passwordAuthentication": False},
{"enable": True},
{"enable": False},
{"passwordAuthentication": True},
{"passwordAuthentication": False},
]
@pytest.mark.parametrize("settings", available_settings)
def test_set_settings_ssh_off(authorized_client, ssh_off, settings):
response = authorized_client.put("/services/ssh", json=settings)
assert response.status_code == 200
data = read_json(ssh_off / "turned_off.json")["ssh"]
if "enable" in settings:
assert data["enable"] == settings["enable"]
if "passwordAuthentication" in settings:
assert data["passwordAuthentication"] == settings["passwordAuthentication"]
@pytest.mark.parametrize("settings", available_settings)
def test_set_settings_ssh_on(authorized_client, ssh_on, settings):
response = authorized_client.put("/services/ssh", json=settings)
assert response.status_code == 200
data = read_json(ssh_on / "turned_on.json")["ssh"]
if "enable" in settings:
assert data["enable"] == settings["enable"]
if "passwordAuthentication" in settings:
assert data["passwordAuthentication"] == settings["passwordAuthentication"]
@pytest.mark.parametrize("settings", available_settings)
def test_set_settings_all_off(authorized_client, all_off, settings):
response = authorized_client.put("/services/ssh", json=settings)
assert response.status_code == 200
data = read_json(all_off / "all_off.json")["ssh"]
if "enable" in settings:
assert data["enable"] == settings["enable"]
if "passwordAuthentication" in settings:
assert data["passwordAuthentication"] == settings["passwordAuthentication"]
@pytest.mark.parametrize("settings", available_settings)
def test_set_settings_undefined(authorized_client, undefined_settings, settings):
response = authorized_client.put("/services/ssh", json=settings)
assert response.status_code == 200
data = read_json(undefined_settings / "undefined.json")["ssh"]
if "enable" in settings:
assert data["enable"] == settings["enable"]
if "passwordAuthentication" in settings:
assert data["passwordAuthentication"] == settings["passwordAuthentication"]
## PUT ON /ssh/key/send ######################################################
def test_add_root_key(authorized_client, ssh_on):
response = authorized_client.put(
"/services/ssh/key/send", json={"public_key": "ssh-rsa KEY test@pc"}
)
assert response.status_code == 201
assert read_json(ssh_on / "turned_on.json")["ssh"]["rootKeys"] == [
"ssh-rsa KEY test@pc",
]
def test_add_root_key_on_undefined(authorized_client, undefined_settings):
response = authorized_client.put(
"/services/ssh/key/send", json={"public_key": "ssh-rsa KEY test@pc"}
)
assert response.status_code == 201
data = read_json(undefined_settings / "undefined.json")
assert data["ssh"]["rootKeys"] == ["ssh-rsa KEY test@pc"]
def test_add_root_key_one_more(authorized_client, root_and_admin_have_keys):
response = authorized_client.put(
"/services/ssh/key/send", json={"public_key": "ssh-rsa KEY test@pc"}
)
assert response.status_code == 201
assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][
"rootKeys"
] == [
"ssh-ed25519 KEY test@pc",
"ssh-rsa KEY test@pc",
]
def test_add_existing_root_key(authorized_client, root_and_admin_have_keys):
response = authorized_client.put(
"/services/ssh/key/send", json={"public_key": "ssh-ed25519 KEY test@pc"}
)
assert response.status_code == 409
assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][
"rootKeys"
] == [
"ssh-ed25519 KEY test@pc",
]
def test_add_invalid_root_key(authorized_client, ssh_on):
response = authorized_client.put(
"/services/ssh/key/send", json={"public_key": "INVALID KEY test@pc"}
)
assert response.status_code == 400
## /ssh/keys/{user} ######################################################
def test_get_root_key(authorized_client, root_and_admin_have_keys):
response = authorized_client.get("/services/ssh/keys/root")
assert response.status_code == 200
assert response.json() == ["ssh-ed25519 KEY test@pc"]
def test_get_root_key_when_none(authorized_client, ssh_on):
response = authorized_client.get("/services/ssh/keys/root")
assert response.status_code == 200
assert response.json() == []
def test_get_root_key_on_undefined(authorized_client, undefined_settings):
response = authorized_client.get("/services/ssh/keys/root")
assert response.status_code == 200
assert response.json() == []
def test_delete_root_key(authorized_client, root_and_admin_have_keys):
response = authorized_client.delete(
"/services/ssh/keys/root", json={"public_key": "ssh-ed25519 KEY test@pc"}
)
assert response.status_code == 200
assert (
"rootKeys"
not in read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[
"ssh"
]
or read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][
"rootKeys"
]
== []
)
def test_delete_root_nonexistent_key(authorized_client, root_and_admin_have_keys):
response = authorized_client.delete(
"/services/ssh/keys/root", json={"public_key": "ssh-rsa KEY test@pc"}
)
assert response.status_code == 404
assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][
"rootKeys"
] == [
"ssh-ed25519 KEY test@pc",
]
def test_delete_root_key_on_undefined(authorized_client, undefined_settings):
response = authorized_client.delete(
"/services/ssh/keys/root", json={"public_key": "ssh-ed25519 KEY test@pc"}
)
assert response.status_code == 404
assert "ssh" not in read_json(undefined_settings / "undefined.json")
def test_get_admin_key(authorized_client, root_and_admin_have_keys):
response = authorized_client.get("/services/ssh/keys/tester")
assert response.status_code == 200
assert response.json() == ["ssh-rsa KEY test@pc"]
def test_get_admin_key_when_none(authorized_client, ssh_on):
response = authorized_client.get("/services/ssh/keys/tester")
assert response.status_code == 200
assert response.json() == []
def test_delete_admin_key(authorized_client, root_and_admin_have_keys):
response = authorized_client.delete(
"/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"}
)
assert response.status_code == 200
assert (
read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["sshKeys"]
== []
)
def test_delete_nonexistent_admin_key(authorized_client, root_and_admin_have_keys):
response = authorized_client.delete(
"/services/ssh/keys/tester", json={"public_key": "ssh-rsa NO KEY test@pc"}
)
assert response.status_code == 404
assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[
"sshKeys"
] == ["ssh-rsa KEY test@pc"]
def test_delete_admin_key_on_undefined(authorized_client, undefined_settings):
response = authorized_client.delete(
"/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"}
)
assert response.status_code == 404
assert "sshKeys" not in read_json(undefined_settings / "undefined.json")
def test_add_admin_key(authorized_client, ssh_on):
response = authorized_client.post(
"/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"}
)
assert response.status_code == 201
assert read_json(ssh_on / "turned_on.json")["sshKeys"] == [
"ssh-rsa KEY test@pc",
]
def test_add_admin_key_one_more(authorized_client, root_and_admin_have_keys):
response = authorized_client.post(
"/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY_2 test@pc"}
)
assert response.status_code == 201
assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[
"sshKeys"
] == ["ssh-rsa KEY test@pc", "ssh-rsa KEY_2 test@pc"]
def test_add_existing_admin_key(authorized_client, root_and_admin_have_keys):
response = authorized_client.post(
"/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"}
)
assert response.status_code == 409
assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[
"sshKeys"
] == [
"ssh-rsa KEY test@pc",
]
def test_add_invalid_admin_key(authorized_client, ssh_on):
response = authorized_client.post(
"/services/ssh/keys/tester", json={"public_key": "INVALID KEY test@pc"}
)
assert response.status_code == 400
@pytest.mark.parametrize("user", [1, 2, 3])
def test_get_user_key(authorized_client, some_users, user):
response = authorized_client.get(f"/services/ssh/keys/user{user}")
assert response.status_code == 200
if user == 1:
assert response.json() == ["ssh-rsa KEY user1@pc"]
else:
assert response.json() == []
def test_get_keys_of_nonexistent_user(authorized_client, some_users):
response = authorized_client.get("/services/ssh/keys/user4")
assert response.status_code == 404
def test_get_keys_of_undefined_users(authorized_client, undefined_settings):
response = authorized_client.get("/services/ssh/keys/user1")
assert response.status_code == 404
@pytest.mark.parametrize("user", [1, 2, 3])
def test_add_user_key(authorized_client, some_users, user):
response = authorized_client.post(
f"/services/ssh/keys/user{user}", json={"public_key": "ssh-ed25519 KEY test@pc"}
)
assert response.status_code == 201
if user == 1:
assert read_json(some_users / "some_users.json")["users"][user - 1][
"sshKeys"
] == [
"ssh-rsa KEY user1@pc",
"ssh-ed25519 KEY test@pc",
]
else:
assert read_json(some_users / "some_users.json")["users"][user - 1][
"sshKeys"
] == ["ssh-ed25519 KEY test@pc"]
def test_add_existing_user_key(authorized_client, some_users):
response = authorized_client.post(
"/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user1@pc"}
)
assert response.status_code == 409
assert read_json(some_users / "some_users.json")["users"][0]["sshKeys"] == [
"ssh-rsa KEY user1@pc",
]
def test_add_invalid_user_key(authorized_client, some_users):
response = authorized_client.post(
"/services/ssh/keys/user1", json={"public_key": "INVALID KEY user1@pc"}
)
assert response.status_code == 400
def test_delete_user_key(authorized_client, some_users):
response = authorized_client.delete(
"/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user1@pc"}
)
assert response.status_code == 200
assert read_json(some_users / "some_users.json")["users"][0]["sshKeys"] == []
@pytest.mark.parametrize("user", [2, 3])
def test_delete_nonexistent_user_key(authorized_client, some_users, user):
response = authorized_client.delete(
f"/services/ssh/keys/user{user}", json={"public_key": "ssh-rsa KEY user1@pc"}
)
assert response.status_code == 404
if user == 2:
assert (
read_json(some_users / "some_users.json")["users"][user - 1]["sshKeys"]
== []
)
if user == 3:
"sshKeys" not in read_json(some_users / "some_users.json")["users"][user - 1]
def test_add_keys_of_nonexistent_user(authorized_client, some_users):
response = authorized_client.post(
"/services/ssh/keys/user4", json={"public_key": "ssh-rsa KEY user4@pc"}
)
assert response.status_code == 404
def test_add_key_on_undefined_users(authorized_client, undefined_settings):
response = authorized_client.post(
"/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user4@pc"}
)
assert response.status_code == 404
def test_delete_keys_of_nonexistent_user(authorized_client, some_users):
response = authorized_client.delete(
"/services/ssh/keys/user4", json={"public_key": "ssh-rsa KEY user4@pc"}
)
assert response.status_code == 404
def test_delete_key_when_undefined_users(authorized_client, undefined_settings):
response = authorized_client.delete(
"/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user1@pc"}
)
assert response.status_code == 404

View file

@ -1,57 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": false,
"passwordAuthentication": false,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,57 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,76 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true,
"rootKeys": [
"ssh-ed25519 KEY test@pc"
]
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"users": [
{
"username": "user1",
"hashedPassword": "HASHED_PASSWORD_1",
"sshKeys": [
"ssh-rsa KEY user1@pc"
]
},
{
"username": "user2",
"hashedPassword": "HASHED_PASSWORD_2",
"sshKeys": [
]
},
{
"username": "user3",
"hashedPassword": "HASHED_PASSWORD_3"
}
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,51 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": false,
"passwordAuthentication": true
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,51 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {
"enable": true,
"passwordAuthentication": true
},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,47 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,51 +0,0 @@
{
"api": {
"token": "TEST_TOKEN",
"enableSwagger": false
},
"bitwarden": {
"enable": false
},
"databasePassword": "PASSWORD",
"domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD",
"hostname": "test-instance",
"nextcloud": {
"adminPassword": "ADMIN",
"databasePassword": "ADMIN",
"enable": true
},
"resticPassword": "PASS",
"ssh": {},
"username": "tester",
"gitea": {
"enable": false
},
"ocserv": {
"enable": true
},
"pleroma": {
"enable": true
},
"autoUpgrade": {
"enable": true,
"allowReboot": true
},
"timezone": "Europe/Moscow",
"sshKeys": [
"ssh-rsa KEY test@pc"
],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
}

View file

@ -1,457 +0,0 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=missing-function-docstring
import datetime
from datetime import timezone
import pytest
from tests.conftest import TOKENS_FILE_CONTENTS
from tests.common import (
RECOVERY_KEY_VALIDATION_DATETIME,
DEVICE_KEY_VALIDATION_DATETIME,
NearFuture,
assert_recovery_recent,
)
from tests.common import five_minutes_into_future_naive_utc as five_minutes_into_future
from tests.common import five_minutes_into_past_naive_utc as five_minutes_into_past
DATE_FORMATS = [
"%Y-%m-%dT%H:%M:%S.%fZ",
"%Y-%m-%dT%H:%M:%S.%f",
"%Y-%m-%d %H:%M:%S.%fZ",
"%Y-%m-%d %H:%M:%S.%f",
]
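# DATE_FORMATS lists the timestamp layouts the recovery-token endpoint is
# expected to accept. Purely for illustration (not part of the original
# tests), the same instant rendered with the first layout:
#
#     datetime.datetime(2022, 1, 14, 8, 31, 10).strftime(DATE_FORMATS[0])
#     # -> '2022-01-14T08:31:10.000000Z'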
def assert_original(client):
new_tokens = rest_get_tokens_info(client)
for token in TOKENS_FILE_CONTENTS["tokens"]:
assert_token_valid(client, token["token"])
for new_token in new_tokens:
if new_token["name"] == token["name"]:
assert (
datetime.datetime.fromisoformat(new_token["date"]) == token["date"]
)
assert_no_recovery(client)
def assert_token_valid(client, token):
client.headers.update({"Authorization": "Bearer " + token})
assert rest_get_tokens_info(client) is not None
def rest_get_tokens_info(client):
response = client.get("/auth/tokens")
assert response.status_code == 200
return response.json()
def rest_try_authorize_new_device(client, token, device_name):
response = client.post(
"/auth/new_device/authorize",
json={
"token": token,
"device": device_name,
},
)
return response
def rest_make_recovery_token(client, expires_at=None, timeformat=None, uses=None):
json = {}
if expires_at is not None:
assert timeformat is not None
expires_at_str = expires_at.strftime(timeformat)
json["expiration"] = expires_at_str
if uses is not None:
json["uses"] = uses
if json == {}:
response = client.post("/auth/recovery_token")
else:
response = client.post(
"/auth/recovery_token",
json=json,
)
if not response.status_code == 200:
raise ValueError(response.reason, response.text, response.json()["detail"])
assert response.status_code == 200
assert "token" in response.json()
return response.json()["token"]
def rest_get_recovery_status(client):
response = client.get("/auth/recovery_token")
assert response.status_code == 200
return response.json()
def rest_get_recovery_date(client):
status = rest_get_recovery_status(client)
assert "date" in status
return status["date"]
def assert_no_recovery(client):
assert not rest_get_recovery_status(client)["exists"]
def rest_recover_with_mnemonic(client, mnemonic_token, device_name):
recovery_response = client.post(
"/auth/recovery_token/use",
json={"token": mnemonic_token, "device": device_name},
)
assert recovery_response.status_code == 200
new_token = recovery_response.json()["token"]
assert_token_valid(client, new_token)
return new_token
# Tokens
def test_get_tokens_info(authorized_client, tokens_file):
assert sorted(rest_get_tokens_info(authorized_client), key=lambda x: x["name"]) == [
{"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True},
{
"name": "test_token2",
"date": "2022-01-14T08:31:10.789314",
"is_caller": False,
},
]
def test_get_tokens_unauthorized(client, tokens_file):
response = client.get("/auth/tokens")
assert response.status_code == 401
def test_delete_token_unauthorized(client, authorized_client, tokens_file):
response = client.delete("/auth/tokens")
assert response.status_code == 401
assert_original(authorized_client)
def test_delete_token(authorized_client, tokens_file):
response = authorized_client.delete(
"/auth/tokens", json={"token_name": "test_token2"}
)
assert response.status_code == 200
assert rest_get_tokens_info(authorized_client) == [
{"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True}
]
def test_delete_self_token(authorized_client, tokens_file):
response = authorized_client.delete(
"/auth/tokens", json={"token_name": "test_token"}
)
assert response.status_code == 400
assert_original(authorized_client)
def test_delete_nonexistent_token(authorized_client, tokens_file):
response = authorized_client.delete(
"/auth/tokens", json={"token_name": "test_token3"}
)
assert response.status_code == 404
assert_original(authorized_client)
def test_refresh_token_unauthorized(client, authorized_client, tokens_file):
response = client.post("/auth/tokens")
assert response.status_code == 401
assert_original(authorized_client)
def test_refresh_token(authorized_client, tokens_file):
response = authorized_client.post("/auth/tokens")
assert response.status_code == 200
new_token = response.json()["token"]
assert_token_valid(authorized_client, new_token)
# New device
def test_get_new_device_auth_token_unauthorized(client, authorized_client, tokens_file):
response = client.post("/auth/new_device")
assert response.status_code == 401
assert "token" not in response.json()
assert "detail" in response.json()
# We can only check the existence of a token we already know.
def test_get_and_delete_new_device_token(client, authorized_client, tokens_file):
token = rest_get_new_device_token(authorized_client)
response = authorized_client.delete("/auth/new_device", json={"token": token})
assert response.status_code == 200
assert rest_try_authorize_new_device(client, token, "new_device").status_code == 404
def test_delete_token_unauthenticated(client, authorized_client, tokens_file):
token = rest_get_new_device_token(authorized_client)
response = client.delete("/auth/new_device", json={"token": token})
assert response.status_code == 401
assert rest_try_authorize_new_device(client, token, "new_device").status_code == 200
def rest_get_new_device_token(client):
response = client.post("/auth/new_device")
assert response.status_code == 200
assert "token" in response.json()
return response.json()["token"]
def test_get_and_authorize_new_device(client, authorized_client, tokens_file):
token = rest_get_new_device_token(authorized_client)
response = rest_try_authorize_new_device(client, token, "new_device")
assert response.status_code == 200
assert_token_valid(authorized_client, response.json()["token"])
def test_authorize_new_device_with_invalid_token(
client, authorized_client, tokens_file
):
response = rest_try_authorize_new_device(client, "invalid_token", "new_device")
assert response.status_code == 404
assert_original(authorized_client)
def test_get_and_authorize_used_token(client, authorized_client, tokens_file):
token_to_be_used_2_times = rest_get_new_device_token(authorized_client)
response = rest_try_authorize_new_device(
client, token_to_be_used_2_times, "new_device"
)
assert response.status_code == 200
assert_token_valid(authorized_client, response.json()["token"])
response = rest_try_authorize_new_device(
client, token_to_be_used_2_times, "new_device"
)
assert response.status_code == 404
def test_get_and_authorize_token_after_12_minutes(
client, authorized_client, tokens_file, mocker
):
token = rest_get_new_device_token(authorized_client)
# TARDIS sounds
mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, NearFuture)
response = rest_try_authorize_new_device(client, token, "new_device")
assert response.status_code == 404
assert_original(authorized_client)
def test_authorize_without_token(client, authorized_client, tokens_file):
response = client.post(
"/auth/new_device/authorize",
json={"device": "new_device"},
)
assert response.status_code == 422
assert_original(authorized_client)
# Recovery tokens
# GET /auth/recovery_token returns token status
# - if token is valid, returns 200 and token status
# - token status:
# - exists (boolean)
# - valid (boolean)
# - date (string)
# - expiration (string)
# - uses_left (int)
# - if token is invalid, returns 400 and empty body
# POST /auth/recovery_token generates a new token
# has two optional parameters:
# - expiration (string in datetime format)
# - uses_left (int)
# POST /auth/recovery_token/use uses the token
# required arguments:
# - token (string)
# - device (string)
# - if token is valid, returns 200 and token
# - if token is invalid, returns 404
# - if request is invalid, returns 400
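# Putting these endpoints together, the happy-path recovery flow exercised by
# the tests below looks roughly like this. This is an illustrative sketch that
# reuses the helpers defined earlier in this file; it is not itself a test:
#
#     def recovery_flow_sketch(authorized_client, client):
#         mnemonic = rest_make_recovery_token(authorized_client, uses=1)
#         assert rest_get_recovery_status(authorized_client)["valid"]
#         # Exchanging the mnemonic for a device token consumes one use.
#         rest_recover_with_mnemonic(client, mnemonic, "recovery_device")
#         assert rest_get_recovery_status(authorized_client)["uses_left"] == 0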
def test_get_recovery_token_status_unauthorized(client, authorized_client, tokens_file):
response = client.get("/auth/recovery_token")
assert response.status_code == 401
assert_original(authorized_client)
def test_get_recovery_token_when_none_exists(authorized_client, tokens_file):
response = authorized_client.get("/auth/recovery_token")
assert response.status_code == 200
assert response.json() == {
"exists": False,
"valid": False,
"date": None,
"expiration": None,
"uses_left": None,
}
assert_original(authorized_client)
def test_generate_recovery_token(authorized_client, client, tokens_file):
# Generate token without expiration and uses_left
mnemonic_token = rest_make_recovery_token(authorized_client)
time_generated = rest_get_recovery_date(authorized_client)
assert_recovery_recent(time_generated)
assert rest_get_recovery_status(authorized_client) == {
"exists": True,
"valid": True,
"date": time_generated,
"expiration": None,
"uses_left": None,
}
rest_recover_with_mnemonic(client, mnemonic_token, "recover_device")
# And again
rest_recover_with_mnemonic(client, mnemonic_token, "recover_device2")
@pytest.mark.parametrize("timeformat", DATE_FORMATS)
def test_generate_recovery_token_with_expiration_date(
authorized_client, client, tokens_file, timeformat, mocker
):
# Generate token with expiration date
# Generate expiration date in the future
expiration_date = five_minutes_into_future()
mnemonic_token = rest_make_recovery_token(
authorized_client, expires_at=expiration_date, timeformat=timeformat
)
time_generated = rest_get_recovery_date(authorized_client)
assert_recovery_recent(time_generated)
assert rest_get_recovery_status(authorized_client) == {
"exists": True,
"valid": True,
"date": time_generated,
"expiration": expiration_date.replace(tzinfo=timezone.utc).isoformat(),
"uses_left": None,
}
rest_recover_with_mnemonic(client, mnemonic_token, "recover_device")
# And again
rest_recover_with_mnemonic(client, mnemonic_token, "recover_device2")
# Try to use token after expiration date
mock = mocker.patch(RECOVERY_KEY_VALIDATION_DATETIME, NearFuture)
device_name = "recovery_device3"
recovery_response = client.post(
"/auth/recovery_token/use",
json={"token": mnemonic_token, "device": device_name},
)
assert recovery_response.status_code == 404
# Assert that the token was not created
assert device_name not in [
token["name"] for token in rest_get_tokens_info(authorized_client)
]
@pytest.mark.parametrize("timeformat", DATE_FORMATS)
def test_generate_recovery_token_with_expiration_in_the_past(
authorized_client, tokens_file, timeformat
):
# Server must return 400 if expiration date is in the past
expiration_date = five_minutes_into_past()
expiration_date_str = expiration_date.strftime(timeformat)
response = authorized_client.post(
"/auth/recovery_token",
json={"expiration": expiration_date_str},
)
assert response.status_code == 400
assert_no_recovery(authorized_client)
def test_generate_recovery_token_with_invalid_time_format(
authorized_client, tokens_file
):
# Server must return 422 if the expiration date is not a valid timestamp
expiration_date = "invalid_time_format"
response = authorized_client.post(
"/auth/recovery_token",
json={"expiration": expiration_date},
)
assert response.status_code == 422
assert_no_recovery(authorized_client)
def test_generate_recovery_token_with_limited_uses(
authorized_client, client, tokens_file
):
# Generate token with limited uses
mnemonic_token = rest_make_recovery_token(authorized_client, uses=2)
time_generated = rest_get_recovery_date(authorized_client)
assert_recovery_recent(time_generated)
assert rest_get_recovery_status(authorized_client) == {
"exists": True,
"valid": True,
"date": time_generated,
"expiration": None,
"uses_left": 2,
}
# Try to use the token
rest_recover_with_mnemonic(client, mnemonic_token, "recover_device")
assert rest_get_recovery_status(authorized_client) == {
"exists": True,
"valid": True,
"date": time_generated,
"expiration": None,
"uses_left": 1,
}
# Try to use token again
rest_recover_with_mnemonic(client, mnemonic_token, "recover_device2")
assert rest_get_recovery_status(authorized_client) == {
"exists": True,
"valid": False,
"date": time_generated,
"expiration": None,
"uses_left": 0,
}
# Try to use token after limited uses
recovery_response = client.post(
"/auth/recovery_token/use",
json={"token": mnemonic_token, "device": "recovery_device3"},
)
assert recovery_response.status_code == 404
def test_generate_recovery_token_with_negative_uses(
authorized_client, client, tokens_file
):
# Server must return 400 if the number of uses is negative
response = authorized_client.post(
"/auth/recovery_token",
json={"uses": -2},
)
assert response.status_code == 400
assert_no_recovery(authorized_client)
def test_generate_recovery_token_with_zero_uses(authorized_client, client, tokens_file):
# Server must return 400 if the number of uses is zero
response = authorized_client.post(
"/auth/recovery_token",
json={"uses": 0},
)
assert response.status_code == 400
assert_no_recovery(authorized_client)

View file

@ -1,416 +0,0 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=missing-function-docstring
import json
import os
import pytest
from selfprivacy_api.utils import get_domain
def read_json(file_path):
with open(file_path, "r", encoding="utf-8") as file:
return json.load(file)
@pytest.fixture
def domain_file(mocker, datadir):
mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", datadir / "domain")
return datadir
@pytest.fixture
def turned_on(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json")
assert read_json(datadir / "turned_on.json")["autoUpgrade"]["enable"] == True
assert read_json(datadir / "turned_on.json")["autoUpgrade"]["allowReboot"] == True
assert read_json(datadir / "turned_on.json")["timezone"] == "Europe/Moscow"
return datadir
@pytest.fixture
def turned_off(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json")
assert read_json(datadir / "turned_off.json")["autoUpgrade"]["enable"] == False
assert read_json(datadir / "turned_off.json")["autoUpgrade"]["allowReboot"] == False
assert read_json(datadir / "turned_off.json")["timezone"] == "Europe/Moscow"
return datadir
@pytest.fixture
def undefined_config(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json")
assert "autoUpgrade" not in read_json(datadir / "undefined.json")
assert "timezone" not in read_json(datadir / "undefined.json")
return datadir
@pytest.fixture
def no_values(mocker, datadir):
mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_values.json")
assert "enable" not in read_json(datadir / "no_values.json")["autoUpgrade"]
assert "allowReboot" not in read_json(datadir / "no_values.json")["autoUpgrade"]
return datadir
class ProcessMock:
"""Mock subprocess.Popen"""
def __init__(self, args, **kwargs):
self.args = args
self.kwargs = kwargs
def communicate():
return (b"", None)
returncode = 0
class BrokenServiceMock(ProcessMock):
"""Mock subprocess.Popen"""
def communicate():
return (b"Testing error", None)
returncode = 3
@pytest.fixture
def mock_subprocess_popen(mocker):
mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock)
return mock
@pytest.fixture
def mock_os_chdir(mocker):
mock = mocker.patch("os.chdir", autospec=True)
return mock
@pytest.fixture
def mock_broken_service(mocker):
mock = mocker.patch(
"subprocess.Popen", autospec=True, return_value=BrokenServiceMock
)
return mock
@pytest.fixture
def mock_subprocess_check_output(mocker):
mock = mocker.patch(
"subprocess.check_output", autospec=True, return_value=b"Testing Linux"
)
return mock
def test_wrong_auth(wrong_auth_client):
response = wrong_auth_client.get("/system/pythonVersion")
assert response.status_code == 401
def test_get_domain(authorized_client, domain_file):
assert get_domain() == "test-domain.tld"
## Timezones
def test_get_timezone_unauthorized(client, turned_on):
response = client.get("/system/configuration/timezone")
assert response.status_code == 401
def test_get_timezone(authorized_client, turned_on):
response = authorized_client.get("/system/configuration/timezone")
assert response.status_code == 200
assert response.json() == "Europe/Moscow"
def test_get_timezone_on_undefined(authorized_client, undefined_config):
response = authorized_client.get("/system/configuration/timezone")
assert response.status_code == 200
assert response.json() == "Europe/Uzhgorod"
def test_put_timezone_unauthorized(client, turned_on):
response = client.put(
"/system/configuration/timezone", json={"timezone": "Europe/Moscow"}
)
assert response.status_code == 401
def test_put_timezone(authorized_client, turned_on):
response = authorized_client.put(
"/system/configuration/timezone", json={"timezone": "Europe/Helsinki"}
)
assert response.status_code == 200
assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Helsinki"
def test_put_timezone_on_undefined(authorized_client, undefined_config):
response = authorized_client.put(
"/system/configuration/timezone", json={"timezone": "Europe/Helsinki"}
)
assert response.status_code == 200
assert (
read_json(undefined_config / "undefined.json")["timezone"] == "Europe/Helsinki"
)
def test_put_timezone_without_timezone(authorized_client, turned_on):
response = authorized_client.put("/system/configuration/timezone", json={})
assert response.status_code == 422
assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow"
def test_put_invalid_timezone(authorized_client, turned_on):
response = authorized_client.put(
"/system/configuration/timezone", json={"timezone": "Invalid/Timezone"}
)
assert response.status_code == 400
assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow"
## AutoUpgrade
def test_get_auto_upgrade_unauthorized(client, turned_on):
response = client.get("/system/configuration/autoUpgrade")
assert response.status_code == 401
def test_get_auto_upgrade(authorized_client, turned_on):
response = authorized_client.get("/system/configuration/autoUpgrade")
assert response.status_code == 200
assert response.json() == {
"enable": True,
"allowReboot": True,
}
def test_get_auto_upgrade_on_undefined(authorized_client, undefined_config):
response = authorized_client.get("/system/configuration/autoUpgrade")
assert response.status_code == 200
assert response.json() == {
"enable": True,
"allowReboot": False,
}
def test_get_auto_upgrade_without_values(authorized_client, no_values):
response = authorized_client.get("/system/configuration/autoUpgrade")
assert response.status_code == 200
assert response.json() == {
"enable": True,
"allowReboot": False,
}
def test_get_auto_upgrade_turned_off(authorized_client, turned_off):
response = authorized_client.get("/system/configuration/autoUpgrade")
assert response.status_code == 200
assert response.json() == {
"enable": False,
"allowReboot": False,
}
def test_put_auto_upgrade_unauthorized(client, turned_on):
response = client.put(
"/system/configuration/autoUpgrade", json={"enable": True, "allowReboot": True}
)
assert response.status_code == 401
def test_put_auto_upgrade(authorized_client, turned_on):
response = authorized_client.put(
"/system/configuration/autoUpgrade", json={"enable": False, "allowReboot": True}
)
assert response.status_code == 200
assert read_json(turned_on / "turned_on.json")["autoUpgrade"] == {
"enable": False,
"allowReboot": True,
}
def test_put_auto_upgrade_on_undefined(authorized_client, undefined_config):
response = authorized_client.put(
"/system/configuration/autoUpgrade", json={"enable": False, "allowReboot": True}
)
assert response.status_code == 200
assert read_json(undefined_config / "undefined.json")["autoUpgrade"] == {
"enable": False,
"allowReboot": True,
}
def test_put_auto_upgrade_without_values(authorized_client, no_values):
response = authorized_client.put(
"/system/configuration/autoUpgrade", json={"enable": True, "allowReboot": True}
)
assert response.status_code == 200
assert read_json(no_values / "no_values.json")["autoUpgrade"] == {
"enable": True,
"allowReboot": True,
}
def test_put_auto_upgrade_turned_off(authorized_client, turned_off):
response = authorized_client.put(
"/system/configuration/autoUpgrade", json={"enable": True, "allowReboot": True}
)
assert response.status_code == 200
assert read_json(turned_off / "turned_off.json")["autoUpgrade"] == {
"enable": True,
"allowReboot": True,
}
def test_put_auto_upgrade_without_enable(authorized_client, turned_off):
response = authorized_client.put(
"/system/configuration/autoUpgrade", json={"allowReboot": True}
)
assert response.status_code == 200
assert read_json(turned_off / "turned_off.json")["autoUpgrade"] == {
"enable": False,
"allowReboot": True,
}
def test_put_auto_upgrade_without_allow_reboot(authorized_client, turned_off):
response = authorized_client.put(
"/system/configuration/autoUpgrade", json={"enable": True}
)
assert response.status_code == 200
assert read_json(turned_off / "turned_off.json")["autoUpgrade"] == {
"enable": True,
"allowReboot": False,
}
def test_put_auto_upgrade_with_empty_json(authorized_client, turned_off):
response = authorized_client.put("/system/configuration/autoUpgrade", json={})
assert response.status_code == 200
assert read_json(turned_off / "turned_off.json")["autoUpgrade"] == {
"enable": False,
"allowReboot": False,
}
def test_system_rebuild_unauthorized(client, mock_subprocess_popen):
response = client.get("/system/configuration/apply")
assert response.status_code == 401
assert mock_subprocess_popen.call_count == 0
def test_system_rebuild(authorized_client, mock_subprocess_popen):
response = authorized_client.get("/system/configuration/apply")
assert response.status_code == 200
assert mock_subprocess_popen.call_count == 1
assert mock_subprocess_popen.call_args[0][0] == [
"systemctl",
"start",
"sp-nixos-rebuild.service",
]
def test_system_upgrade_unauthorized(client, mock_subprocess_popen):
response = client.get("/system/configuration/upgrade")
assert response.status_code == 401
assert mock_subprocess_popen.call_count == 0
def test_system_upgrade(authorized_client, mock_subprocess_popen):
response = authorized_client.get("/system/configuration/upgrade")
assert response.status_code == 200
assert mock_subprocess_popen.call_count == 1
assert mock_subprocess_popen.call_args[0][0] == [
"systemctl",
"start",
"sp-nixos-upgrade.service",
]
def test_system_rollback_unauthorized(client, mock_subprocess_popen):
response = client.get("/system/configuration/rollback")
assert response.status_code == 401
assert mock_subprocess_popen.call_count == 0
def test_system_rollback(authorized_client, mock_subprocess_popen):
response = authorized_client.get("/system/configuration/rollback")
assert response.status_code == 200
assert mock_subprocess_popen.call_count == 1
assert mock_subprocess_popen.call_args[0][0] == [
"systemctl",
"start",
"sp-nixos-rollback.service",
]
def test_get_system_version_unauthorized(client, mock_subprocess_check_output):
response = client.get("/system/version")
assert response.status_code == 401
assert mock_subprocess_check_output.call_count == 0
def test_get_system_version(authorized_client, mock_subprocess_check_output):
response = authorized_client.get("/system/version")
assert response.status_code == 200
assert response.json() == {"system_version": "Testing Linux"}
assert mock_subprocess_check_output.call_count == 1
assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"]
def test_reboot_system_unauthorized(client, mock_subprocess_popen):
response = client.get("/system/reboot")
assert response.status_code == 401
assert mock_subprocess_popen.call_count == 0
def test_reboot_system(authorized_client, mock_subprocess_popen):
response = authorized_client.get("/system/reboot")
assert response.status_code == 200
assert mock_subprocess_popen.call_count == 1
assert mock_subprocess_popen.call_args[0][0] == ["reboot"]
def test_get_python_version_unauthorized(client, mock_subprocess_check_output):
response = client.get("/system/pythonVersion")
assert response.status_code == 401
assert mock_subprocess_check_output.call_count == 0
def test_get_python_version(authorized_client, mock_subprocess_check_output):
response = authorized_client.get("/system/pythonVersion")
assert response.status_code == 200
assert response.json() == "Testing Linux"
assert mock_subprocess_check_output.call_count == 1
assert mock_subprocess_check_output.call_args[0][0] == ["python", "-V"]
def test_pull_system_unauthorized(client, mock_subprocess_popen):
response = client.get("/system/configuration/pull")
assert response.status_code == 401
assert mock_subprocess_popen.call_count == 0
def test_pull_system(authorized_client, mock_subprocess_popen, mock_os_chdir):
current_dir = os.getcwd()
response = authorized_client.get("/system/configuration/pull")
assert response.status_code == 200
assert mock_subprocess_popen.call_count == 1
assert mock_subprocess_popen.call_args[0][0] == ["git", "pull"]
assert mock_os_chdir.call_count == 2
assert mock_os_chdir.call_args_list[0][0][0] == "/etc/nixos"
assert mock_os_chdir.call_args_list[1][0][0] == current_dir
def test_pull_system_broken_repo(authorized_client, mock_broken_service, mock_os_chdir):
current_dir = os.getcwd()
response = authorized_client.get("/system/configuration/pull")
assert response.status_code == 500
assert mock_broken_service.call_count == 1
assert mock_os_chdir.call_count == 2
assert mock_os_chdir.call_args_list[0][0][0] == "/etc/nixos"
assert mock_os_chdir.call_args_list[1][0][0] == current_dir
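# The two pull tests above pin down the expected behaviour of
# /system/configuration/pull: chdir into /etc/nixos, run `git pull` once, always
# chdir back to the original directory, and report failure (HTTP 500) when git
# exits non-zero. Below is a hypothetical sketch of a helper with that shape —
# the name and boolean return convention are assumptions, not the removed
# implementation.
import os
import subprocess
def pull_repository_changes_sketch() -> bool:
    """Pull /etc/nixos and report whether git succeeded."""
    current_dir = os.getcwd()
    os.chdir("/etc/nixos")
    try:
        process = subprocess.Popen(["git", "pull"], start_new_session=True)
        process.communicate()
        return process.returncode == 0
    finally:
        # Restore the working directory even when the pull fails.
        os.chdir(current_dir)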

Some files were not shown because too many files have changed in this diff