diff --git a/.gitignore b/.gitignore index 7f93e02..bd62fff 100644 --- a/.gitignore +++ b/.gitignore @@ -148,3 +148,5 @@ cython_debug/ *.db *.rdb + +/result diff --git a/README.md b/README.md new file mode 100644 index 0000000..cf87eeb --- /dev/null +++ b/README.md @@ -0,0 +1,73 @@ +# SelfPrivacy GraphQL API, which allows the app to control your server + +## build + +```console +$ nix build +``` + +As a result, you should get a `./result` symlink to a folder (in `/nix/store`) with the build contents. + +## develop & test + +```console +$ nix develop +$ [SP devshell] pytest .
=================================== test session starts =====================================
platform linux -- Python 3.10.11, pytest-7.1.3, pluggy-1.0.0
rootdir: /data/selfprivacy/selfprivacy-rest-api
plugins: anyio-3.5.0, datadir-1.4.1, mock-3.8.2
collected 692 items

tests/test_block_device_utils.py ................. [ 2%]
tests/test_common.py ..... [ 3%]
tests/test_jobs.py ........ [ 4%]
tests/test_model_storage.py .. [ 4%]
tests/test_models.py .. [ 4%]
tests/test_network_utils.py ...... [ 5%]
tests/test_services.py ...... [ 6%]
tests/test_graphql/test_api.py . [ 6%]
tests/test_graphql/test_api_backup.py ............... [ 8%]
tests/test_graphql/test_api_devices.py ................. [ 11%]
tests/test_graphql/test_api_recovery.py ......... [ 12%]
tests/test_graphql/test_api_version.py .. [ 13%]
tests/test_graphql/test_backup.py ............................... [ 21%]
tests/test_graphql/test_localsecret.py ... [ 22%]
tests/test_graphql/test_ssh.py ............ [ 23%]
tests/test_graphql/test_system.py ............................. [ 28%]
tests/test_graphql/test_system_nixos_tasks.py ........ [ 29%]
tests/test_graphql/test_users.py .................................. [ 42%]
tests/test_graphql/test_repository/test_json_tokens_repository.py [ 44%]
tests/test_graphql/test_repository/test_tokens_repository.py .... [ 53%]
tests/test_rest_endpoints/test_auth.py .......................... [ 58%]
tests/test_rest_endpoints/test_system.py ........................ [ 63%]
tests/test_rest_endpoints/test_users.py ................................ [ 76%]
tests/test_rest_endpoints/services/test_bitwarden.py ............ [ 78%]
tests/test_rest_endpoints/services/test_gitea.py .............. [ 80%]
tests/test_rest_endpoints/services/test_mailserver.py ..... [ 81%]
tests/test_rest_endpoints/services/test_nextcloud.py ............ [ 83%]
tests/test_rest_endpoints/services/test_ocserv.py .............. [ 85%]
tests/test_rest_endpoints/services/test_pleroma.py .............. [ 87%]
tests/test_rest_endpoints/services/test_services.py .... [ 88%]
tests/test_rest_endpoints/services/test_ssh.py ..................... [100%]

============================== 692 passed in 352.76s (0:05:52) ===============================
```

If you don't have the experimental flakes feature enabled, you can use the following command:

```console
nix --extra-experimental-features nix-command --extra-experimental-features flakes develop
```

## dependencies and dependent modules

The current flake inherits nixpkgs from the NixOS configuration flake, so there is no need to pull in an extra nixpkgs dependency if you want to stay aligned with the exact NixOS configuration.

![diagram](http://www.plantuml.com/plantuml/proxy?src=https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api/raw/branch/master/nix-dependencies-diagram.puml)

The Nix code for the API's NixOS service module is located in the NixOS configuration repository.
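To check which nixpkgs revision the `flake.lock` currently pins (for example, to compare it against your NixOS configuration), a flakes-enabled Nix can print the lock metadata:

```console
$ nix flake metadata
```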
+ +## current issues + +- It's not clear how to store in this repository information about several compatible NixOS configuration commits for which the API application tests pass. Currently, there is only a single `flake.lock` here. diff --git a/default.nix b/default.nix new file mode 100644 index 0000000..1c779d9 --- /dev/null +++ b/default.nix @@ -0,0 +1,33 @@ +{ pythonPackages, rev ? "local" }: + +pythonPackages.buildPythonPackage rec { + pname = "selfprivacy-graphql-api"; + version = rev; + src = builtins.filterSource (p: t: p != ".git" && t != "symlink") ./.; + nativeCheckInputs = [ pythonPackages.pytestCheckHook ]; + propagatedBuildInputs = with pythonPackages; [ + fastapi + gevent + huey + mnemonic + portalocker + psutil + pydantic + pytest + pytest-datadir + pytest-mock + pytz + redis + setuptools + strawberry-graphql + typing-extensions + uvicorn + ]; + pythonImportsCheck = [ "selfprivacy_api" ]; + doCheck = false; + meta = { + description = '' + SelfPrivacy Server Management API + ''; + }; +} diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..3dd8a15 --- /dev/null +++ b/flake.lock @@ -0,0 +1,26 @@ +{ + "nodes": { + "nixpkgs": { + "locked": { + "lastModified": 1702780907, + "narHash": "sha256-blbrBBXjjZt6OKTcYX1jpe9SRof2P9ZYWPzq22tzXAA=", + "owner": "nixos", + "repo": "nixpkgs", + "rev": "1e2e384c5b7c50dbf8e9c441a9e58d85f408b01f", + "type": "github" + }, + "original": { + "owner": "nixos", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "nixpkgs": "nixpkgs" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..c133604 --- /dev/null +++ b/flake.nix @@ -0,0 +1,50 @@ +{ + description = "SelfPrivacy API flake"; + + inputs.nixpkgs.url = "github:nixos/nixpkgs"; + + outputs = { self, nixpkgs, ... }: + let + system = "x86_64-linux"; + pkgs = nixpkgs.legacyPackages.${system}; + selfprivacy-graphql-api = pkgs.callPackage ./default.nix { + pythonPackages = pkgs.python310Packages; + rev = self.shortRev or self.dirtyShortRev or "dirty"; + }; + in + { + packages.${system}.default = selfprivacy-graphql-api; + nixosModules.default = + import ./nixos/module.nix self.packages.${system}.default; + devShells.${system}.default = pkgs.mkShell { + packages = + let + # TODO: is there a better way to get the environment for VS Code? + python3 = + nixpkgs.lib.findFirst (p: p.pname == "python3") (abort "wtf") + self.packages.${system}.default.propagatedBuildInputs; + python-env = + python3.withPackages + (_: self.packages.${system}.default.propagatedBuildInputs); + in + with pkgs; [ + python-env + black + rclone + redis + restic + ]; + shellHook = '' + # Envs set with export and envs set as attributes are treated differently. + # For example, printenv will not fetch the value of an attribute.
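+ # The lines below restart a local redis-server for the devshell: any previous + # instance is killed, then a fresh one is started detached from this shell, + # with its output discarded.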
+ export USE_REDIS_PORT=6379 + export TEST_MODE=true + pkill redis-server + sleep 2 + setsid redis-server --bind 127.0.0.1 --port $USE_REDIS_PORT >/dev/null 2>/dev/null & + # maybe set more env-vars + ''; + }; + }; + nixConfig.bash-prompt = ''\n\[\e[1;32m\][\[\e[0m\]\[\e[1;34m\]SP devshell\[\e[0m\]\[\e[1;32m\]:\w]\$\[\[\e[0m\] ''; +} diff --git a/nix-dependencies-diagram.puml b/nix-dependencies-diagram.puml new file mode 100644 index 0000000..de98bf7 --- /dev/null +++ b/nix-dependencies-diagram.puml @@ -0,0 +1,22 @@ +@startuml + +left to right direction + +title repositories and flake inputs relations diagram + +cloud nixpkgs as nixpkgs_transit +control "nixos-rebuild" as nixos_rebuild +component "SelfPrivacy\nAPI app" as selfprivacy_app +component "SelfPrivacy\nNixOS configuration" as nixos_configuration + +note top of nixos_configuration : SelfPrivacy\nAPI service module + +nixos_configuration ).. nixpkgs_transit +nixpkgs_transit ..> selfprivacy_app +selfprivacy_app --> nixos_configuration +[nixpkgs] --> nixos_configuration +nixos_configuration -> nixos_rebuild + +footer %date("yyyy-MM-dd'T'HH:mmZ") + +@enduml diff --git a/nixos/module.nix b/nixos/module.nix new file mode 100644 index 0000000..7790e18 --- /dev/null +++ b/nixos/module.nix @@ -0,0 +1,166 @@ +selfprivacy-graphql-api: { config, lib, pkgs, ... }: + +let + cfg = config.services.selfprivacy-api; + config-id = "default"; + nixos-rebuild = "${config.system.build.nixos-rebuild}/bin/nixos-rebuild"; + nix = "${config.nix.package.out}/bin/nix"; +in +{ + options.services.selfprivacy-api = { + enable = lib.mkOption { + default = true; + type = lib.types.bool; + description = '' + Enable SelfPrivacy API service + ''; + }; + }; + config = lib.mkIf cfg.enable { + users.users."selfprivacy-api" = { + isNormalUser = false; + isSystemUser = true; + extraGroups = [ "opendkim" ]; + group = "selfprivacy-api"; + }; + users.groups."selfprivacy-api".members = [ "selfprivacy-api" ]; + + systemd.services.selfprivacy-api = { + description = "API Server used to control system from the mobile application"; + environment = config.nix.envVars // { + HOME = "/root"; + PYTHONUNBUFFERED = "1"; + } // config.networking.proxy.envVars; + path = [ + "/var/" + "/var/dkim/" + pkgs.coreutils + pkgs.gnutar + pkgs.xz.bin + pkgs.gzip + pkgs.gitMinimal + config.nix.package.out + pkgs.restic + pkgs.mkpasswd + pkgs.util-linux + pkgs.e2fsprogs + pkgs.iproute2 + ]; + after = [ "network-online.target" ]; + wantedBy = [ "network-online.target" ]; + serviceConfig = { + User = "root"; + ExecStart = "${selfprivacy-graphql-api}/bin/app.py"; + Restart = "always"; + RestartSec = "5"; + }; + }; + systemd.services.selfprivacy-api-worker = { + description = "Task worker for SelfPrivacy API"; + environment = config.nix.envVars // { + HOME = "/root"; + PYTHONUNBUFFERED = "1"; + PYTHONPATH = + pkgs.python310Packages.makePythonPath [ selfprivacy-graphql-api ]; + } // config.networking.proxy.envVars; + path = [ + "/var/" + "/var/dkim/" + pkgs.coreutils + pkgs.gnutar + pkgs.xz.bin + pkgs.gzip + pkgs.gitMinimal + config.nix.package.out + pkgs.restic + pkgs.mkpasswd + pkgs.util-linux + pkgs.e2fsprogs + pkgs.iproute2 + ]; + after = [ "network-online.target" ]; + wantedBy = [ "network-online.target" ]; + serviceConfig = { + User = "root"; + ExecStart = "${pkgs.python310Packages.huey}/bin/huey_consumer.py selfprivacy_api.task_registry.huey"; + Restart = "always"; + RestartSec = "5"; + }; + }; + # One shot systemd service to rebuild NixOS using nixos-rebuild + systemd.services.sp-nixos-rebuild = 
{ + description = "nixos-rebuild switch"; + environment = config.nix.envVars // { + HOME = "/root"; + } // config.networking.proxy.envVars; + # TODO figure out how to get dependencies list reliably + path = [ pkgs.coreutils pkgs.gnutar pkgs.xz.bin pkgs.gzip pkgs.gitMinimal config.nix.package.out ]; + # TODO set proper timeout for reboot instead of service restart + serviceConfig = { + User = "root"; + WorkingDirectory = "/etc/nixos"; + # sync top-level flake with sp-modules sub-flake + # (https://github.com/NixOS/nix/issues/9339) + ExecStartPre = '' + ${nix} flake lock --override-input sp-modules path:./sp-modules + ''; + ExecStart = '' + ${nixos-rebuild} switch --flake .#${config-id} + ''; + KillMode = "none"; + SendSIGKILL = "no"; + }; + restartIfChanged = false; + unitConfig.X-StopOnRemoval = false; + }; + # One shot systemd service to upgrade NixOS using nixos-rebuild + systemd.services.sp-nixos-upgrade = { + # protection against simultaneous runs + after = [ "sp-nixos-rebuild.service" ]; + description = "Upgrade NixOS and SP modules to latest versions"; + environment = config.nix.envVars // { + HOME = "/root"; + } // config.networking.proxy.envVars; + # TODO figure out how to get dependencies list reliably + path = [ pkgs.coreutils pkgs.gnutar pkgs.xz.bin pkgs.gzip pkgs.gitMinimal config.nix.package.out ]; + serviceConfig = { + User = "root"; + WorkingDirectory = "/etc/nixos"; + # TODO get URL from systemd template parameter? + ExecStartPre = '' + ${nix} flake update \ + --override-input selfprivacy-nixos-config git+https://git.selfprivacy.org/SelfPrivacy/selfprivacy-nixos-config.git?ref=flakes + ''; + ExecStart = '' + ${nixos-rebuild} switch --flake .#${config-id} + ''; + KillMode = "none"; + SendSIGKILL = "no"; + }; + restartIfChanged = false; + unitConfig.X-StopOnRemoval = false; + }; + # One shot systemd service to rollback NixOS using nixos-rebuild + systemd.services.sp-nixos-rollback = { + # protection against simultaneous runs + after = [ "sp-nixos-rebuild.service" "sp-nixos-upgrade.service" ]; + description = "Rollback NixOS using nixos-rebuild"; + environment = config.nix.envVars // { + HOME = "/root"; + } // config.networking.proxy.envVars; + # TODO figure out how to get dependencies list reliably + path = [ pkgs.coreutils pkgs.gnutar pkgs.xz.bin pkgs.gzip pkgs.gitMinimal config.nix.package.out ]; + serviceConfig = { + User = "root"; + WorkingDirectory = "/etc/nixos"; + ExecStart = '' + ${nixos-rebuild} switch --rollback --flake .#${config-id} + ''; + KillMode = "none"; + SendSIGKILL = "no"; + }; + restartIfChanged = false; + unitConfig.X-StopOnRemoval = false; + }; + }; +} diff --git a/selfprivacy_api/actions/api_tokens.py b/selfprivacy_api/actions/api_tokens.py index 38133fd..e93491f 100644 --- a/selfprivacy_api/actions/api_tokens.py +++ b/selfprivacy_api/actions/api_tokens.py @@ -1,11 +1,15 @@ -"""App tokens actions""" -from datetime import datetime +""" +App tokens actions. 
+The only actions on tokens that are accessible from the APIs +""" +from datetime import datetime, timezone from typing import Optional from pydantic import BaseModel from mnemonic import Mnemonic -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, +from selfprivacy_api.utils.timeutils import ensure_tz_aware, ensure_tz_aware_strict +from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( + RedisTokensRepository, ) from selfprivacy_api.repositories.tokens.exceptions import ( TokenNotFound, @@ -14,7 +18,7 @@ from selfprivacy_api.repositories.tokens.exceptions import ( NewDeviceKeyNotFound, ) -TOKEN_REPO = JsonTokensRepository() +TOKEN_REPO = RedisTokensRepository() class TokenInfoWithIsCaller(BaseModel): @@ -25,6 +29,14 @@ class TokenInfoWithIsCaller(BaseModel): is_caller: bool +def _naive(date_time: Optional[datetime]) -> Optional[datetime]: + if date_time is None: + return None + if date_time.tzinfo is not None: + date_time = date_time.astimezone(timezone.utc) + return date_time.replace(tzinfo=None) + + def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCaller]: """Get the tokens info""" caller_name = TOKEN_REPO.get_token_by_token_string(caller_token).device_name @@ -83,16 +95,22 @@ class RecoveryTokenStatus(BaseModel): def get_api_recovery_token_status() -> RecoveryTokenStatus: - """Get the recovery token status""" + """Get the recovery token status, timezone-aware""" token = TOKEN_REPO.get_recovery_key() if token is None: return RecoveryTokenStatus(exists=False, valid=False) is_valid = TOKEN_REPO.is_recovery_key_valid() + + # New tokens are tz-aware, but older ones might not be + expiry_date = token.expires_at + if expiry_date is not None: + expiry_date = ensure_tz_aware_strict(expiry_date) + return RecoveryTokenStatus( exists=True, valid=is_valid, - date=token.created_at, - expiration=token.expires_at, + date=ensure_tz_aware_strict(token.created_at), + expiration=expiry_date, uses_left=token.uses_left, ) @@ -110,8 +128,9 @@ def get_new_api_recovery_key( ) -> str: """Get new recovery key""" if expiration_date is not None: - current_time = datetime.now().timestamp() - if expiration_date.timestamp() < current_time: + expiration_date = ensure_tz_aware(expiration_date) + current_time = datetime.now(timezone.utc) + if expiration_date < current_time: raise InvalidExpirationDate("Expiration date is in the past") if uses_left is not None: if uses_left <= 0: diff --git a/selfprivacy_api/actions/ssh.py b/selfprivacy_api/actions/ssh.py index 3f79ff8..0c529ef 100644 --- a/selfprivacy_api/actions/ssh.py +++ b/selfprivacy_api/actions/ssh.py @@ -31,7 +31,7 @@ def get_ssh_settings() -> UserdataSshSettings: if "enable" not in data["ssh"]: data["ssh"]["enable"] = True if "passwordAuthentication" not in data["ssh"]: - data["ssh"]["passwordAuthentication"] = True + data["ssh"]["passwordAuthentication"] = False if "rootKeys" not in data["ssh"]: data["ssh"]["rootKeys"] = [] return UserdataSshSettings(**data["ssh"]) @@ -49,19 +49,6 @@ def set_ssh_settings( data["ssh"]["passwordAuthentication"] = password_authentication -def add_root_ssh_key(public_key: str): - with WriteUserData() as data: - if "ssh" not in data: - data["ssh"] = {} - if "rootKeys" not in data["ssh"]: - data["ssh"]["rootKeys"] = [] - # Return 409 if key already in array - for key in data["ssh"]["rootKeys"]: - if key == public_key: - raise KeyAlreadyExists() - data["ssh"]["rootKeys"].append(public_key) - - class KeyAlreadyExists(Exception): """Key already exists""" diff --git
a/selfprivacy_api/actions/system.py b/selfprivacy_api/actions/system.py index 853662f..13c3708 100644 --- a/selfprivacy_api/actions/system.py +++ b/selfprivacy_api/actions/system.py @@ -2,7 +2,7 @@ import os import subprocess import pytz -from typing import Optional +from typing import Optional, List from pydantic import BaseModel from selfprivacy_api.utils import WriteUserData, ReadUserData @@ -13,7 +13,7 @@ def get_timezone() -> str: with ReadUserData() as user_data: if "timezone" in user_data: return user_data["timezone"] - return "Europe/Uzhgorod" + return "Etc/UTC" class InvalidTimezone(Exception): @@ -58,36 +58,56 @@ def set_auto_upgrade_settings( user_data["autoUpgrade"]["allowReboot"] = allowReboot +class ShellException(Exception): + """Something went wrong when calling another process""" + + pass + + +def run_blocking(cmd: List[str], new_session: bool = False) -> str: + """Run a process, block until done, return output, complain if failed""" + process_handle = subprocess.Popen( + cmd, + shell=False, + start_new_session=new_session, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + stdout_raw, stderr_raw = process_handle.communicate() + stdout = stdout_raw.decode("utf-8") + if stderr_raw is not None: + stderr = stderr_raw.decode("utf-8") + else: + stderr = "" + output = stdout + "\n" + stderr + if process_handle.returncode != 0: + raise ShellException( + f"Shell command failed, command array: {cmd}, output: {output}" + ) + return stdout + + def rebuild_system() -> int: """Rebuild the system""" - rebuild_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rebuild.service"], start_new_session=True - ) - rebuild_result.communicate()[0] - return rebuild_result.returncode + run_blocking(["systemctl", "start", "sp-nixos-rebuild.service"], new_session=True) + return 0 def rollback_system() -> int: """Rollback the system""" - rollback_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-rollback.service"], start_new_session=True - ) - rollback_result.communicate()[0] - return rollback_result.returncode + run_blocking(["systemctl", "start", "sp-nixos-rollback.service"], new_session=True) + return 0 def upgrade_system() -> int: """Upgrade the system""" - upgrade_result = subprocess.Popen( - ["systemctl", "start", "sp-nixos-upgrade.service"], start_new_session=True - ) - upgrade_result.communicate()[0] - return upgrade_result.returncode + run_blocking(["systemctl", "start", "sp-nixos-upgrade.service"], new_session=True) + return 0 def reboot_system() -> None: """Reboot the system""" - subprocess.Popen(["reboot"], start_new_session=True) + run_blocking(["reboot"], new_session=True) def get_system_version() -> str: diff --git a/selfprivacy_api/actions/users.py b/selfprivacy_api/actions/users.py index bfc1756..fafa84f 100644 --- a/selfprivacy_api/actions/users.py +++ b/selfprivacy_api/actions/users.py @@ -58,7 +58,7 @@ def get_users( ) for user in user_data["users"] ] - if not exclude_primary: + if not exclude_primary and "username" in user_data.keys(): users.append( UserDataUser( username=user_data["username"], @@ -107,6 +107,12 @@ class PasswordIsEmpty(Exception): pass +class InvalidConfiguration(Exception): + """The userdata is broken""" + + pass + + def create_user(username: str, password: str): if password == "": raise PasswordIsEmpty("Password is empty") @@ -124,6 +130,10 @@ def create_user(username: str, password: str): with ReadUserData() as user_data: ensure_ssh_and_users_fields_exist(user_data) + if "username" not in user_data.keys(): + raise 
InvalidConfiguration( + "Broken config: Admin name is not defined. Consider recovery or add it manually" + ) if username == user_data["username"]: raise UserAlreadyExists("User already exists") if username in [user["username"] for user in user_data["users"]]: diff --git a/selfprivacy_api/app.py b/selfprivacy_api/app.py index a58301a..64ca85a 100644 --- a/selfprivacy_api/app.py +++ b/selfprivacy_api/app.py @@ -10,12 +10,6 @@ from selfprivacy_api.dependencies import get_api_version from selfprivacy_api.graphql.schema import schema from selfprivacy_api.migrations import run_migrations -from selfprivacy_api.rest import ( - system, - users, - api_auth, - services, -) app = FastAPI() @@ -32,10 +26,6 @@ app.add_middleware( ) -app.include_router(system.router) -app.include_router(users.router) -app.include_router(api_auth.router) -app.include_router(services.router) app.include_router(graphql_app, prefix="/graphql") diff --git a/selfprivacy_api/backup/__init__.py b/selfprivacy_api/backup/__init__.py index 7b013f4..0fa845e 100644 --- a/selfprivacy_api/backup/__init__.py +++ b/selfprivacy_api/backup/__init__.py @@ -1,10 +1,11 @@ """ This module contains the controller class for backups. """ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone +import time import os from os import statvfs -from typing import List, Optional +from typing import Callable, List, Optional from selfprivacy_api.utils import ReadUserData, WriteUserData @@ -23,7 +24,12 @@ from selfprivacy_api.jobs import Jobs, JobStatus, Job from selfprivacy_api.graphql.queries.providers import ( BackupProvider as BackupProviderEnum, ) -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy +from selfprivacy_api.graphql.common_types.backup import ( + RestoreStrategy, + BackupReason, + AutobackupQuotas, +) + from selfprivacy_api.models.backup.snapshot import Snapshot @@ -32,6 +38,7 @@ from selfprivacy_api.backup.providers import get_provider from selfprivacy_api.backup.storage import Storage from selfprivacy_api.backup.jobs import ( get_backup_job, + get_backup_fail, add_backup_job, get_restore_job, add_restore_job, @@ -51,6 +58,8 @@ BACKUP_PROVIDER_ENVS = { "location": "BACKUP_LOCATION", } +AUTOBACKUP_JOB_EXPIRATION_SECONDS = 60 * 60 # one hour + class NotDeadError(AssertionError): """ @@ -70,6 +79,24 @@ class NotDeadError(AssertionError): """ +class RotationBucket: + """ + Bucket object used for rotation. 
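+    For example (hypothetical quota): a bucket with counter=7 and a lambda +    that maps a snapshot to its calendar day keeps the newest snapshot from +    each of the 7 most recent days that have snapshots.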
+ Has the following mutable fields: + - the counter, int + - the lambda function which takes datetime and the int and returns the int + - the last, int + """ + + def __init__(self, counter: int, last: int, rotation_lambda): + self.counter: int = counter + self.last: int = last + self.rotation_lambda: Callable[[datetime, int], int] = rotation_lambda + + def __str__(self) -> str: + return f"Bucket(counter={self.counter}, last={self.last})" + + class Backups: """A stateless controller class for backups""" @@ -264,10 +291,12 @@ class Backups: # Backup @staticmethod - def back_up(service: Service) -> Snapshot: - """The top-level function to back up a service""" - folders = service.get_folders() - tag = service.get_id() + def back_up( + service: Service, reason: BackupReason = BackupReason.EXPLICIT + ) -> Snapshot: + """The top-level function to back up a service + If it fails for any reason at all, it should both mark job as + errored and re-raise an error""" job = get_backup_job(service) if job is None: @@ -275,20 +304,132 @@ class Backups: Jobs.update(job, status=JobStatus.RUNNING) try: + if service.can_be_backed_up() is False: + raise ValueError("cannot backup a non-backuppable service") + folders = service.get_folders() + service_name = service.get_id() service.pre_backup() snapshot = Backups.provider().backupper.start_backup( folders, - tag, + service_name, + reason=reason, ) - Backups._store_last_snapshot(tag, snapshot) + + Backups._store_last_snapshot(service_name, snapshot) + if reason == BackupReason.AUTO: + Backups._prune_auto_snaps(service) service.post_restore() except Exception as error: Jobs.update(job, status=JobStatus.ERROR, status_text=str(error)) raise error Jobs.update(job, status=JobStatus.FINISHED) + if reason in [BackupReason.AUTO, BackupReason.PRE_RESTORE]: + Jobs.set_expiration(job, AUTOBACKUP_JOB_EXPIRATION_SECONDS) return snapshot + @staticmethod + def _auto_snaps(service): + return [ + snap + for snap in Backups.get_snapshots(service) + if snap.reason == BackupReason.AUTO + ] + + @staticmethod + def _prune_snaps_with_quotas(snapshots: List[Snapshot]) -> List[Snapshot]: + # Function broken out for testability + # Sorting newest first + sorted_snaps = sorted(snapshots, key=lambda s: s.created_at, reverse=True) + quotas: AutobackupQuotas = Backups.autobackup_quotas() + + buckets: list[RotationBucket] = [ + RotationBucket( + quotas.last, # type: ignore + -1, + lambda _, index: index, + ), + RotationBucket( + quotas.daily, # type: ignore + -1, + lambda date, _: date.year * 10000 + date.month * 100 + date.day, + ), + RotationBucket( + quotas.weekly, # type: ignore + -1, + lambda date, _: date.year * 100 + date.isocalendar()[1], + ), + RotationBucket( + quotas.monthly, # type: ignore + -1, + lambda date, _: date.year * 100 + date.month, + ), + RotationBucket( + quotas.yearly, # type: ignore + -1, + lambda date, _: date.year, + ), + ] + + new_snaplist: List[Snapshot] = [] + for i, snap in enumerate(sorted_snaps): + keep_snap = False + for bucket in buckets: + if (bucket.counter > 0) or (bucket.counter == -1): + val = bucket.rotation_lambda(snap.created_at, i) + if (val != bucket.last) or (i == len(sorted_snaps) - 1): + bucket.last = val + if bucket.counter > 0: + bucket.counter -= 1 + if not keep_snap: + new_snaplist.append(snap) + keep_snap = True + + return new_snaplist + + @staticmethod + def _prune_auto_snaps(service) -> None: + # Not very testable by itself, so most testing is going on Backups._prune_snaps_with_quotas + # We can still test total limits and, say, daily 
limits + + auto_snaps = Backups._auto_snaps(service) + new_snaplist = Backups._prune_snaps_with_quotas(auto_snaps) + + deletable_snaps = [snap for snap in auto_snaps if snap not in new_snaplist] + Backups.forget_snapshots(deletable_snaps) + + @staticmethod + def _standardize_quotas(i: int) -> int: + if i <= -1: + i = -1 + return i + + @staticmethod + def autobackup_quotas() -> AutobackupQuotas: + """0 means do not keep, -1 means unlimited""" + + return Storage.autobackup_quotas() + + @staticmethod + def set_autobackup_quotas(quotas: AutobackupQuotas) -> None: + """0 means do not keep, -1 means unlimited""" + + Storage.set_autobackup_quotas( + AutobackupQuotas( + last=Backups._standardize_quotas(quotas.last), # type: ignore + daily=Backups._standardize_quotas(quotas.daily), # type: ignore + weekly=Backups._standardize_quotas(quotas.weekly), # type: ignore + monthly=Backups._standardize_quotas(quotas.monthly), # type: ignore + yearly=Backups._standardize_quotas(quotas.yearly), # type: ignore + ) + ) + # do not prune all autosnaps right away, this will be done by an async task + + @staticmethod + def prune_all_autosnaps() -> None: + for service in get_all_services(): + Backups._prune_auto_snaps(service) # Restoring @staticmethod @@ -307,9 +448,9 @@ class Backups: job: Job, ) -> None: Jobs.update( - job, status=JobStatus.CREATED, status_text=f"Waiting for pre-restore backup" + job, status=JobStatus.CREATED, status_text="Waiting for pre-restore backup" ) - failsafe_snapshot = Backups.back_up(service) + failsafe_snapshot = Backups.back_up(service, BackupReason.PRE_RESTORE) Jobs.update( job, status=JobStatus.RUNNING, status_text=f"Restoring from {snapshot.id}" @@ -465,6 +606,19 @@ class Backups: return snap + @staticmethod + def forget_snapshots(snapshots: List[Snapshot]) -> None: + """ + Deletes a batch of snapshots from the repo and from cache + Optimized + """ + ids = [snapshot.id for snapshot in snapshots] + Backups.provider().backupper.forget_snapshots(ids) + + # less critical + for snapshot in snapshots: + Storage.delete_cached_snapshot(snapshot) + @staticmethod def forget_snapshot(snapshot: Snapshot) -> None: """Deletes a snapshot from the repo and from cache""" @@ -473,11 +627,11 @@ class Backups: @staticmethod def forget_all_snapshots(): - """deliberately erase all snapshots we made""" - # there is no dedicated optimized command for this, - # but maybe we can have a multi-erase - for snapshot in Backups.get_all_snapshots(): - Backups.forget_snapshot(snapshot) + """ + Mark all snapshots we have made for deletion and make them inaccessible + (this is done by the cloud, we only issue a command) + """ + Backups.forget_snapshots(Backups.get_all_snapshots()) @staticmethod def force_snapshot_cache_reload() -> None: @@ -557,23 +711,49 @@ class Backups: """Get a timezone-aware time of the last backup of a service""" return Storage.get_last_backup_time(service.get_id()) + @staticmethod + def get_last_backup_error_time(service: Service) -> Optional[datetime]: + """Get a timezone-aware time of the last failed backup attempt for a service""" + job = get_backup_fail(service) + if job is not None: + datetime_created = job.created_at + if datetime_created.tzinfo is None: + # assume it is in localtime + offset = timedelta(seconds=time.localtime().tm_gmtoff) + datetime_created = datetime_created - offset + return datetime.combine( + datetime_created.date(), datetime_created.time(), timezone.utc + ) + return datetime_created + return None + @staticmethod def is_time_to_backup_service(service: Service, time: datetime):
"""Returns True if it is time to back up a service""" period = Backups.autobackup_period_minutes() - service_id = service.get_id() - if not service.can_be_backed_up(): - return False if period is None: return False - last_backup = Storage.get_last_backup_time(service_id) + if not service.is_enabled(): + return False + if not service.can_be_backed_up(): + return False + + last_error = Backups.get_last_backup_error_time(service) + + if last_error is not None: + if time < last_error + timedelta(seconds=AUTOBACKUP_JOB_EXPIRATION_SECONDS): + return False + + last_backup = Backups.get_last_backed_up(service) + + # Queue a backup immediately if there are no previous backups if last_backup is None: - # queue a backup immediately if there are no previous backups return True if time > last_backup + timedelta(minutes=period): return True + return False # Helpers diff --git a/selfprivacy_api/backup/backuppers/__init__.py b/selfprivacy_api/backup/backuppers/__init__.py index ccf78b9..46a719e 100644 --- a/selfprivacy_api/backup/backuppers/__init__.py +++ b/selfprivacy_api/backup/backuppers/__init__.py @@ -2,6 +2,7 @@ from abc import ABC, abstractmethod from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot +from selfprivacy_api.graphql.common_types.backup import BackupReason class AbstractBackupper(ABC): @@ -22,7 +23,12 @@ class AbstractBackupper(ABC): raise NotImplementedError @abstractmethod - def start_backup(self, folders: List[str], tag: str) -> Snapshot: + def start_backup( + self, + folders: List[str], + service_name: str, + reason: BackupReason = BackupReason.EXPLICIT, + ) -> Snapshot: """Start a backup of the given folders""" raise NotImplementedError @@ -60,3 +66,8 @@ class AbstractBackupper(ABC): def forget_snapshot(self, snapshot_id) -> None: """Forget a snapshot""" raise NotImplementedError + + @abstractmethod + def forget_snapshots(self, snapshot_ids: List[str]) -> None: + """Maybe optimized deletion of a batch of snapshots, just cycling if unsupported""" + raise NotImplementedError diff --git a/selfprivacy_api/backup/backuppers/none_backupper.py b/selfprivacy_api/backup/backuppers/none_backupper.py index 3f9f7fd..86e25a6 100644 --- a/selfprivacy_api/backup/backuppers/none_backupper.py +++ b/selfprivacy_api/backup/backuppers/none_backupper.py @@ -2,6 +2,7 @@ from typing import List from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.backup.backuppers import AbstractBackupper +from selfprivacy_api.graphql.common_types.backup import BackupReason class NoneBackupper(AbstractBackupper): @@ -13,7 +14,9 @@ class NoneBackupper(AbstractBackupper): def set_creds(self, account: str, key: str, repo: str): pass - def start_backup(self, folders: List[str], tag: str): + def start_backup( + self, folders: List[str], tag: str, reason: BackupReason = BackupReason.EXPLICIT + ): raise NotImplementedError def get_snapshots(self) -> List[Snapshot]: @@ -36,4 +39,7 @@ class NoneBackupper(AbstractBackupper): raise NotImplementedError def forget_snapshot(self, snapshot_id): - raise NotImplementedError + raise NotImplementedError("forget_snapshot") + + def forget_snapshots(self, snapshots): + raise NotImplementedError("forget_snapshots") diff --git a/selfprivacy_api/backup/backuppers/restic_backupper.py b/selfprivacy_api/backup/backuppers/restic_backupper.py index f508368..a8d4e05 100644 --- a/selfprivacy_api/backup/backuppers/restic_backupper.py +++ b/selfprivacy_api/backup/backuppers/restic_backupper.py @@ -5,13 +5,14 @@ import json import datetime import 
tempfile -from typing import List, TypeVar, Callable +from typing import List, Optional, TypeVar, Callable from collections.abc import Iterable from json.decoder import JSONDecodeError from os.path import exists, join from os import mkdir from shutil import rmtree +from selfprivacy_api.graphql.common_types.backup import BackupReason from selfprivacy_api.backup.util import output_yielder, sync from selfprivacy_api.backup.backuppers import AbstractBackupper from selfprivacy_api.models.backup.snapshot import Snapshot @@ -84,7 +85,14 @@ class ResticBackupper(AbstractBackupper): def _password_command(self): return f"echo {LocalBackupSecret.get()}" - def restic_command(self, *args, tag: str = "") -> List[str]: + def restic_command(self, *args, tags: Optional[List[str]] = None) -> List[str]: + """ + Construct a restic command against the currently configured repo + Can accept [nested] arrays as arguments, which will be flattened into the final command + """ + if tags is None: + tags = [] + command = [ "restic", "-o", @@ -94,13 +102,14 @@ class ResticBackupper(AbstractBackupper): "--password-command", self._password_command(), ] - if tag != "": - command.extend( - [ - "--tag", - tag, - ] - ) + if tags != []: + for tag in tags: + command.extend( + [ + "--tag", + tag, + ] + ) if args: command.extend(ResticBackupper.__flatten_list(args)) return command @@ -138,7 +147,12 @@ class ResticBackupper(AbstractBackupper): return result @unlocked_repo - def start_backup(self, folders: List[str], tag: str) -> Snapshot: + def start_backup( + self, + folders: List[str], + service_name: str, + reason: BackupReason = BackupReason.EXPLICIT, + ) -> Snapshot: """ Start backup with restic """ @@ -147,33 +161,35 @@ # of a string and an array of strings assert not isinstance(folders, str) + tags = [service_name, reason.value] + backup_command = self.restic_command( "backup", "--json", folders, - tag=tag, + tags=tags, ) - messages = [] - - service = get_service_by_id(tag) + service = get_service_by_id(service_name) if service is None: - raise ValueError("No service with id ", tag) - + raise ValueError("No service with id ", service_name) job = get_backup_job(service) + + messages = [] output = [] try: for raw_message in output_yielder(backup_command): output.append(raw_message) - message = self.parse_message( - raw_message, - job, - ) + message = self.parse_message(raw_message, job) messages.append(message) - return ResticBackupper._snapshot_from_backup_messages( - messages, - tag, + id = ResticBackupper._snapshot_id_from_backup_messages(messages) + return Snapshot( + created_at=datetime.datetime.now(datetime.timezone.utc), + id=id, + service_name=service_name, + reason=reason, ) + except ValueError as error: raise ValueError( "Could not create a snapshot: ", @@ -181,16 +197,18 @@ output, "parsed messages:", messages, + "command: ", + backup_command, ) from error @staticmethod - def _snapshot_from_backup_messages(messages, repo_name) -> Snapshot: + def _snapshot_id_from_backup_messages(messages) -> str: for message in messages: if message["message_type"] == "summary": - return ResticBackupper._snapshot_from_fresh_summary( - message, - repo_name, - ) + # There is a discrepancy between versions of restic/rclone + # Some report short_id in this field and some full + return message["snapshot_id"][0:SHORT_ID_LEN] + raise ValueError("no summary message in restic json output") def parse_message(self, raw_message_line: str, job=None) -> dict: @@
-206,16 +224,6 @@ class ResticBackupper(AbstractBackupper): ) return message - @staticmethod - def _snapshot_from_fresh_summary(message: dict, repo_name) -> Snapshot: - return Snapshot( - # There is a discrepancy between versions of restic/rclone - # Some report short_id in this field and some full - id=message["snapshot_id"][0:SHORT_ID_LEN], - created_at=datetime.datetime.now(datetime.timezone.utc), - service_name=repo_name, - ) - def init(self) -> None: init_command = self.restic_command( "init", @@ -364,7 +372,6 @@ class ResticBackupper(AbstractBackupper): stderr=subprocess.STDOUT, shell=False, ) as handle: - # for some reason restore does not support # nice reporting of progress via json output = handle.communicate()[0].decode("utf-8") @@ -382,15 +389,17 @@ class ResticBackupper(AbstractBackupper): output, ) + def forget_snapshot(self, snapshot_id: str) -> None: + self.forget_snapshots([snapshot_id]) + @unlocked_repo - def forget_snapshot(self, snapshot_id) -> None: - """ - Either removes snapshot or marks it for deletion later, - depending on server settings - """ + def forget_snapshots(self, snapshot_ids: List[str]) -> None: + # in case the backupper program supports batching, otherwise implement it by cycling forget_command = self.restic_command( "forget", - snapshot_id, + [snapshot_ids], + # TODO: prune should be done in a separate process + "--prune", ) with subprocess.Popen( @@ -410,7 +419,7 @@ class ResticBackupper(AbstractBackupper): if "no matching ID found" in err: raise ValueError( - "trying to delete, but no such snapshot: ", snapshot_id + "trying to delete, but no such snapshot(s): ", snapshot_ids ) assert ( @@ -450,11 +459,19 @@ class ResticBackupper(AbstractBackupper): def get_snapshots(self) -> List[Snapshot]: """Get all snapshots from the repo""" snapshots = [] + for restic_snapshot in self._load_snapshots(): + # Compatibility with previous snaps: + if len(restic_snapshot["tags"]) == 1: + reason = BackupReason.EXPLICIT + else: + reason = restic_snapshot["tags"][1] + snapshot = Snapshot( id=restic_snapshot["short_id"], created_at=restic_snapshot["time"], service_name=restic_snapshot["tags"][0], + reason=reason, ) snapshots.append(snapshot) diff --git a/selfprivacy_api/backup/jobs.py b/selfprivacy_api/backup/jobs.py index ab4eaca..0aacd86 100644 --- a/selfprivacy_api/backup/jobs.py +++ b/selfprivacy_api/backup/jobs.py @@ -80,9 +80,19 @@ def get_job_by_type(type_id: str) -> Optional[Job]: return job +def get_failed_job_by_type(type_id: str) -> Optional[Job]: + for job in Jobs.get_jobs(): + if job.type_id == type_id and job.status == JobStatus.ERROR: + return job + + def get_backup_job(service: Service) -> Optional[Job]: return get_job_by_type(backup_job_type(service)) +def get_backup_fail(service: Service) -> Optional[Job]: + return get_failed_job_by_type(backup_job_type(service)) + + def get_restore_job(service: Service) -> Optional[Job]: return get_job_by_type(restore_job_type(service)) diff --git a/selfprivacy_api/backup/storage.py b/selfprivacy_api/backup/storage.py index 4d1d415..2235437 100644 --- a/selfprivacy_api/backup/storage.py +++ b/selfprivacy_api/backup/storage.py @@ -6,6 +6,10 @@ from datetime import datetime from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.models.backup.provider import BackupProviderModel +from selfprivacy_api.graphql.common_types.backup import ( + AutobackupQuotas, + _AutobackupQuotas, +) from selfprivacy_api.utils.redis_pool import RedisPool from selfprivacy_api.utils.redis_model_storage import ( @@ -23,6 
+27,8 @@ REDIS_INITTED_CACHE = "backups:repo_initted" REDIS_PROVIDER_KEY = "backups:provider" REDIS_AUTOBACKUP_PERIOD_KEY = "backups:autobackup_period" +REDIS_AUTOBACKUP_QUOTAS_KEY = "backups:autobackup_quotas_key" + redis = RedisPool().get_connection() @@ -35,6 +41,7 @@ class Storage: redis.delete(REDIS_PROVIDER_KEY) redis.delete(REDIS_AUTOBACKUP_PERIOD_KEY) redis.delete(REDIS_INITTED_CACHE) + redis.delete(REDIS_AUTOBACKUP_QUOTAS_KEY) prefixes_to_clean = [ REDIS_SNAPSHOTS_PREFIX, @@ -170,3 +177,23 @@ class Storage: def mark_as_uninitted(): """Marks the repository as initialized""" redis.delete(REDIS_INITTED_CACHE) + + @staticmethod + def set_autobackup_quotas(quotas: AutobackupQuotas) -> None: + store_model_as_hash(redis, REDIS_AUTOBACKUP_QUOTAS_KEY, quotas.to_pydantic()) + + @staticmethod + def autobackup_quotas() -> AutobackupQuotas: + quotas_model = hash_as_model( + redis, REDIS_AUTOBACKUP_QUOTAS_KEY, _AutobackupQuotas + ) + if quotas_model is None: + unlimited_quotas = AutobackupQuotas( + last=-1, + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, + ) + return unlimited_quotas + return AutobackupQuotas.from_pydantic(quotas_model) # pylint: disable=no-member diff --git a/selfprivacy_api/backup/tasks.py b/selfprivacy_api/backup/tasks.py index 2b6b79c..6520c70 100644 --- a/selfprivacy_api/backup/tasks.py +++ b/selfprivacy_api/backup/tasks.py @@ -3,13 +3,20 @@ The tasks module contains the worker tasks that are used to back up and restore """ from datetime import datetime, timezone -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy +from selfprivacy_api.graphql.common_types.backup import ( + RestoreStrategy, + BackupReason, +) from selfprivacy_api.models.backup.snapshot import Snapshot from selfprivacy_api.utils.huey import huey from huey import crontab + from selfprivacy_api.services.service import Service +from selfprivacy_api.services import get_service_by_id from selfprivacy_api.backup import Backups +from selfprivacy_api.jobs import Jobs, JobStatus, Job + SNAPSHOT_CACHE_TTL_HOURS = 6 @@ -26,11 +33,30 @@ def validate_datetime(dt: datetime) -> bool: # huey tasks need to return something @huey.task() -def start_backup(service: Service) -> bool: +def start_backup(service_id: str, reason: BackupReason = BackupReason.EXPLICIT) -> bool: """ The worker task that starts the backup process. 
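    Example (hypothetical service id; calling the task enqueues it via huey):     start_backup("mailserver", BackupReason.AUTO)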
""" - Backups.back_up(service) + service = get_service_by_id(service_id) + if service is None: + raise ValueError(f"No such service: {service_id}") + Backups.back_up(service, reason) + return True + + +@huey.task() +def prune_autobackup_snapshots(job: Job) -> bool: + """ + Remove all autobackup snapshots that do not fit into quotas set + """ + Jobs.update(job, JobStatus.RUNNING) + try: + Backups.prune_all_autosnaps() + except Exception as e: + Jobs.update(job, JobStatus.ERROR, error=type(e).__name__ + ":" + str(e)) + return False + + Jobs.update(job, JobStatus.FINISHED) return True @@ -53,7 +79,7 @@ def automatic_backup(): """ time = datetime.utcnow().replace(tzinfo=timezone.utc) for service in Backups.services_to_back_up(time): - start_backup(service) + start_backup(service, BackupReason.AUTO) @huey.periodic_task(crontab(hour=SNAPSHOT_CACHE_TTL_HOURS)) diff --git a/selfprivacy_api/dependencies.py b/selfprivacy_api/dependencies.py index 095d087..1dfc0a9 100644 --- a/selfprivacy_api/dependencies.py +++ b/selfprivacy_api/dependencies.py @@ -27,4 +27,4 @@ async def get_token_header( def get_api_version() -> str: """Get API version""" - return "2.3.1" + return "3.0.0" diff --git a/selfprivacy_api/graphql/common_types/backup.py b/selfprivacy_api/graphql/common_types/backup.py index 992363b..953009d 100644 --- a/selfprivacy_api/graphql/common_types/backup.py +++ b/selfprivacy_api/graphql/common_types/backup.py @@ -1,10 +1,36 @@ """Backup""" # pylint: disable=too-few-public-methods -import strawberry from enum import Enum +import strawberry +from pydantic import BaseModel @strawberry.enum class RestoreStrategy(Enum): INPLACE = "INPLACE" DOWNLOAD_VERIFY_OVERWRITE = "DOWNLOAD_VERIFY_OVERWRITE" + + +@strawberry.enum +class BackupReason(Enum): + EXPLICIT = "EXPLICIT" + AUTO = "AUTO" + PRE_RESTORE = "PRE_RESTORE" + + +class _AutobackupQuotas(BaseModel): + last: int + daily: int + weekly: int + monthly: int + yearly: int + + +@strawberry.experimental.pydantic.type(model=_AutobackupQuotas, all_fields=True) +class AutobackupQuotas: + pass + + +@strawberry.experimental.pydantic.input(model=_AutobackupQuotas, all_fields=True) +class AutobackupQuotasInput: + pass diff --git a/selfprivacy_api/graphql/common_types/dns.py b/selfprivacy_api/graphql/common_types/dns.py index c9f8413..1c79036 100644 --- a/selfprivacy_api/graphql/common_types/dns.py +++ b/selfprivacy_api/graphql/common_types/dns.py @@ -11,3 +11,4 @@ class DnsRecord: content: str ttl: int priority: typing.Optional[int] + display_name: str diff --git a/selfprivacy_api/graphql/common_types/service.py b/selfprivacy_api/graphql/common_types/service.py index 836a3df..56e12b1 100644 --- a/selfprivacy_api/graphql/common_types/service.py +++ b/selfprivacy_api/graphql/common_types/service.py @@ -2,6 +2,7 @@ from enum import Enum import typing import strawberry import datetime +from selfprivacy_api.graphql.common_types.backup import BackupReason from selfprivacy_api.graphql.common_types.dns import DnsRecord from selfprivacy_api.services import get_service_by_id, get_services_by_location @@ -114,6 +115,7 @@ class SnapshotInfo: id: str service: Service created_at: datetime.datetime + reason: BackupReason def service_to_graphql_service(service: ServiceInterface) -> Service: @@ -137,6 +139,7 @@ def service_to_graphql_service(service: ServiceInterface) -> Service: content=record.content, ttl=record.ttl, priority=record.priority, + display_name=record.display_name, ) for record in service.get_dns_records() ], diff --git 
a/selfprivacy_api/graphql/common_types/user.py b/selfprivacy_api/graphql/common_types/user.py index 26ad6f2..a515821 100644 --- a/selfprivacy_api/graphql/common_types/user.py +++ b/selfprivacy_api/graphql/common_types/user.py @@ -17,7 +17,6 @@ class UserType(Enum): @strawberry.type class User: - user_type: UserType username: str # userHomeFolderspace: UserHomeFolderUsage @@ -32,7 +31,6 @@ class UserMutationReturn(MutationReturnInterface): def get_user_by_username(username: str) -> typing.Optional[User]: - user = users_actions.get_user_by_username(username) if user is None: return None diff --git a/selfprivacy_api/graphql/mutations/backup_mutations.py b/selfprivacy_api/graphql/mutations/backup_mutations.py index c022d57..820564c 100644 --- a/selfprivacy_api/graphql/mutations/backup_mutations.py +++ b/selfprivacy_api/graphql/mutations/backup_mutations.py @@ -1,6 +1,8 @@ import typing import strawberry +from selfprivacy_api.jobs import Jobs + from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, @@ -11,11 +13,18 @@ from selfprivacy_api.graphql.queries.backup import BackupConfiguration from selfprivacy_api.graphql.queries.backup import Backup from selfprivacy_api.graphql.queries.providers import BackupProvider from selfprivacy_api.graphql.common_types.jobs import job_to_api_job -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy +from selfprivacy_api.graphql.common_types.backup import ( + AutobackupQuotasInput, + RestoreStrategy, +) from selfprivacy_api.backup import Backups from selfprivacy_api.services import get_service_by_id -from selfprivacy_api.backup.tasks import start_backup, restore_snapshot +from selfprivacy_api.backup.tasks import ( + start_backup, + restore_snapshot, + prune_autobackup_snapshots, +) from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job @@ -90,6 +99,41 @@ class BackupMutations: configuration=Backup().configuration(), ) + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def set_autobackup_quotas( + self, quotas: AutobackupQuotasInput + ) -> GenericBackupConfigReturn: + """ + Set autobackup quotas. + A value of -1 for a timeframe means no limit for that timeframe, while 0 means keep none. + To disable autobackup, use the autobackup period setting, not this mutation.
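+        Example (hypothetical values): last=3, daily=7, weekly=4, monthly=12, +        yearly=-1 keeps the 3 newest auto-snapshots, then the newest one per day +        for 7 days, per week for 4 weeks, per month for 12 months, and one per +        year without limit.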
+ """ + + job = Jobs.add( + name="Trimming autobackup snapshots", + type_id="backups.autobackup_trimming", + description="Pruning the excessive snapshots after the new autobackup quotas are set", + ) + + try: + Backups.set_autobackup_quotas(quotas) + # this task is async and can fail with only a job to report the error + prune_autobackup_snapshots(job) + return GenericBackupConfigReturn( + success=True, + message="", + code=200, + configuration=Backup().configuration(), + ) + + except Exception as e: + return GenericBackupConfigReturn( + success=False, + message=type(e).__name__ + ":" + str(e), + code=400, + configuration=Backup().configuration(), + ) + @strawberry.mutation(permission_classes=[IsAuthenticated]) def start_backup(self, service_id: str) -> GenericJobMutationReturn: """Start backup""" @@ -104,7 +148,7 @@ class BackupMutations: ) job = add_backup_job(service) - start_backup(service) + start_backup(service_id) return GenericJobMutationReturn( success=True, diff --git a/selfprivacy_api/graphql/mutations/deprecated_mutations.py b/selfprivacy_api/graphql/mutations/deprecated_mutations.py index 6d187c6..d9f3e3a 100644 --- a/selfprivacy_api/graphql/mutations/deprecated_mutations.py +++ b/selfprivacy_api/graphql/mutations/deprecated_mutations.py @@ -20,6 +20,7 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( GenericMutationReturn, ) from selfprivacy_api.graphql.mutations.services_mutations import ( + ServiceJobMutationReturn, ServiceMutationReturn, ServicesMutations, ) @@ -201,7 +202,7 @@ class DeprecatedServicesMutations: "services", ) - move_service: ServiceMutationReturn = deprecated_mutation( + move_service: ServiceJobMutationReturn = deprecated_mutation( ServicesMutations.move_service, "services", ) diff --git a/selfprivacy_api/graphql/mutations/services_mutations.py b/selfprivacy_api/graphql/mutations/services_mutations.py index 86cab10..9bacf66 100644 --- a/selfprivacy_api/graphql/mutations/services_mutations.py +++ b/selfprivacy_api/graphql/mutations/services_mutations.py @@ -4,6 +4,7 @@ import typing import strawberry from selfprivacy_api.graphql import IsAuthenticated from selfprivacy_api.graphql.common_types.jobs import job_to_api_job +from selfprivacy_api.jobs import JobStatus from selfprivacy_api.graphql.common_types.service import ( Service, @@ -47,14 +48,22 @@ class ServicesMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def enable_service(self, service_id: str) -> ServiceMutationReturn: """Enable service.""" - service = get_service_by_id(service_id) - if service is None: + try: + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.enable() + except Exception as e: return ServiceMutationReturn( success=False, - message="Service not found.", - code=404, + message=format_error(e), + code=400, ) - service.enable() + return ServiceMutationReturn( success=True, message="Service enabled.", @@ -65,14 +74,21 @@ class ServicesMutations: @strawberry.mutation(permission_classes=[IsAuthenticated]) def disable_service(self, service_id: str) -> ServiceMutationReturn: """Disable service.""" - service = get_service_by_id(service_id) - if service is None: + try: + service = get_service_by_id(service_id) + if service is None: + return ServiceMutationReturn( + success=False, + message="Service not found.", + code=404, + ) + service.disable() + except Exception as e: return ServiceMutationReturn( success=False, - message="Service 
not found.", - code=404, + message=format_error(e), + code=400, ) - service.disable() return ServiceMutationReturn( success=True, message="Service disabled.", @@ -144,6 +160,8 @@ class ServicesMutations: message="Service not found.", code=404, ) + # TODO: make serviceImmovable and BlockdeviceNotFound exceptions + # in the move_to_volume() function and handle them here if not service.is_movable(): return ServiceJobMutationReturn( success=False, @@ -160,10 +178,31 @@ class ServicesMutations: service=service_to_graphql_service(service), ) job = service.move_to_volume(volume) - return ServiceJobMutationReturn( - success=True, - message="Service moved.", - code=200, - service=service_to_graphql_service(service), - job=job_to_api_job(job), - ) + if job.status in [JobStatus.CREATED, JobStatus.RUNNING]: + return ServiceJobMutationReturn( + success=True, + message="Started moving the service.", + code=200, + service=service_to_graphql_service(service), + job=job_to_api_job(job), + ) + elif job.status == JobStatus.FINISHED: + return ServiceJobMutationReturn( + success=True, + message="Service moved.", + code=200, + service=service_to_graphql_service(service), + job=job_to_api_job(job), + ) + else: + return ServiceJobMutationReturn( + success=False, + message=f"Service move failure: {job.status_text}", + code=400, + service=service_to_graphql_service(service), + job=job_to_api_job(job), + ) + + +def format_error(e: Exception) -> str: + return type(e).__name__ + ": " + str(e) diff --git a/selfprivacy_api/graphql/mutations/system_mutations.py b/selfprivacy_api/graphql/mutations/system_mutations.py index f286828..ee8ef15 100644 --- a/selfprivacy_api/graphql/mutations/system_mutations.py +++ b/selfprivacy_api/graphql/mutations/system_mutations.py @@ -12,6 +12,7 @@ from selfprivacy_api.graphql.mutations.mutation_interface import ( import selfprivacy_api.actions.system as system_actions from selfprivacy_api.graphql.common_types.jobs import job_to_api_job from selfprivacy_api.jobs.nix_collect_garbage import start_nix_collect_garbage +import selfprivacy_api.actions.ssh as ssh_actions @strawberry.type @@ -29,6 +30,22 @@ class AutoUpgradeSettingsMutationReturn(MutationReturnInterface): allowReboot: bool +@strawberry.type +class SSHSettingsMutationReturn(MutationReturnInterface): + """A return type for after changing SSH settings""" + + enable: bool + password_authentication: bool + + +@strawberry.input +class SSHSettingsInput: + """Input type for SSH settings""" + + enable: bool + password_authentication: bool + + @strawberry.input class AutoUpgradeSettingsInput: """Input type for auto upgrade settings""" @@ -80,40 +97,88 @@ class SystemMutations: ) @strawberry.mutation(permission_classes=[IsAuthenticated]) - def run_system_rebuild(self) -> GenericMutationReturn: - system_actions.rebuild_system() - return GenericMutationReturn( - success=True, - message="Starting rebuild system", - code=200, + def change_ssh_settings( + self, settings: SSHSettingsInput + ) -> SSHSettingsMutationReturn: + """Change ssh settings of the server.""" + ssh_actions.set_ssh_settings( + enable=settings.enable, + password_authentication=settings.password_authentication, ) + new_settings = ssh_actions.get_ssh_settings() + + return SSHSettingsMutationReturn( + success=True, + message="SSH settings changed", + code=200, + enable=new_settings.enable, + password_authentication=new_settings.passwordAuthentication, + ) + + @strawberry.mutation(permission_classes=[IsAuthenticated]) + def run_system_rebuild(self) -> GenericMutationReturn: + try: 
+ system_actions.rebuild_system() + return GenericMutationReturn( + success=True, + message="Starting system rebuild", + code=200, + ) + except system_actions.ShellException as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=500, + ) + @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_rollback(self) -> GenericMutationReturn: - system_actions.rollback_system() - return GenericMutationReturn( - success=True, - message="Starting rebuild system", - code=200, - ) + try: + system_actions.rollback_system() + return GenericMutationReturn( + success=True, + message="Starting system rollback", + code=200, + ) + except system_actions.ShellException as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=500, + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def run_system_upgrade(self) -> GenericMutationReturn: - system_actions.upgrade_system() - return GenericMutationReturn( - success=True, - message="Starting rebuild system", - code=200, - ) + try: + system_actions.upgrade_system() + return GenericMutationReturn( + success=True, + message="Starting system upgrade", + code=200, + ) + except system_actions.ShellException as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=500, + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def reboot_system(self) -> GenericMutationReturn: - system_actions.reboot_system() - return GenericMutationReturn( - success=True, - message="System reboot has started", - code=200, - ) + try: + system_actions.reboot_system() + return GenericMutationReturn( + success=True, + message="System reboot has started", + code=200, + ) + except system_actions.ShellException as e: + return GenericMutationReturn( + success=False, + message=str(e), + code=500, + ) @strawberry.mutation(permission_classes=[IsAuthenticated]) def pull_repository_changes(self) -> GenericMutationReturn: diff --git a/selfprivacy_api/graphql/mutations/users_mutations.py b/selfprivacy_api/graphql/mutations/users_mutations.py index f7317fb..7644b90 100644 --- a/selfprivacy_api/graphql/mutations/users_mutations.py +++ b/selfprivacy_api/graphql/mutations/users_mutations.py @@ -69,6 +69,12 @@ class UsersMutations: message=str(e), code=400, ) + except users_actions.InvalidConfiguration as e: + return UserMutationReturn( + success=False, + message=str(e), + code=400, + ) except users_actions.UserAlreadyExists as e: return UserMutationReturn( success=False, @@ -147,7 +153,7 @@ except InvalidPublicKey: return UserMutationReturn( success=False, - message="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", + message="Invalid key type.
Only ssh-ed25519, ssh-rsa and ecdsa are supported", code=400, ) except UserNotFound: diff --git a/selfprivacy_api/graphql/queries/api_queries.py b/selfprivacy_api/graphql/queries/api_queries.py index cf56231..7052ded 100644 --- a/selfprivacy_api/graphql/queries/api_queries.py +++ b/selfprivacy_api/graphql/queries/api_queries.py @@ -38,7 +38,7 @@ class ApiRecoveryKeyStatus: def get_recovery_key_status() -> ApiRecoveryKeyStatus: - """Get recovery key status""" + """Get recovery key status, times are timezone-aware""" status = get_api_recovery_token_status() if status is None or not status.exists: return ApiRecoveryKeyStatus( diff --git a/selfprivacy_api/graphql/queries/backup.py b/selfprivacy_api/graphql/queries/backup.py index 6535a88..fc5f78a 100644 --- a/selfprivacy_api/graphql/queries/backup.py +++ b/selfprivacy_api/graphql/queries/backup.py @@ -13,6 +13,7 @@ from selfprivacy_api.graphql.common_types.service import ( SnapshotInfo, service_to_graphql_service, ) +from selfprivacy_api.graphql.common_types.backup import AutobackupQuotas from selfprivacy_api.services import get_service_by_id @@ -26,6 +27,8 @@ class BackupConfiguration: is_initialized: bool # If none, autobackups are disabled autobackup_period: typing.Optional[int] + # None is equal to all quotas being unlimited (-1). Optional for compatibility reasons. + autobackup_quotas: AutobackupQuotas # Bucket name for Backblaze, path for some other providers location_name: typing.Optional[str] location_id: typing.Optional[str] @@ -42,6 +45,7 @@ class Backup: autobackup_period=Backups.autobackup_period_minutes(), location_name=Backups.provider().location, location_id=Backups.provider().repo_id, + autobackup_quotas=Backups.autobackup_quotas(), ) @strawberry.field @@ -73,6 +77,7 @@ class Backup: id=snap.id, service=service, created_at=snap.created_at, + reason=snap.reason, ) result.append(graphql_snap) return result diff --git a/selfprivacy_api/graphql/queries/jobs.py b/selfprivacy_api/graphql/queries/jobs.py index 49bcbd7..e7b99e6 100644 --- a/selfprivacy_api/graphql/queries/jobs.py +++ b/selfprivacy_api/graphql/queries/jobs.py @@ -15,7 +15,6 @@ from selfprivacy_api.jobs import Jobs class Job: @strawberry.field def get_jobs(self) -> typing.List[ApiJob]: - Jobs.get_jobs() return [job_to_api_job(job) for job in Jobs.get_jobs()] diff --git a/selfprivacy_api/graphql/queries/system.py b/selfprivacy_api/graphql/queries/system.py index cc30fd7..82c9260 100644 --- a/selfprivacy_api/graphql/queries/system.py +++ b/selfprivacy_api/graphql/queries/system.py @@ -33,6 +33,7 @@ class SystemDomainInfo: content=record.content, ttl=record.ttl, priority=record.priority, + display_name=record.display_name, ) for record in get_all_required_dns_records() ] diff --git a/selfprivacy_api/jobs/__init__.py b/selfprivacy_api/jobs/__init__.py index 3fe452b..7310016 100644 --- a/selfprivacy_api/jobs/__init__.py +++ b/selfprivacy_api/jobs/__init__.py @@ -8,8 +8,8 @@ A job is a dictionary with the following keys: - name: name of the job - description: description of the job - status: status of the job - - created_at: date of creation of the job - - updated_at: date of last update of the job + - created_at: date of creation of the job, naive localtime + - updated_at: date of last update of the job, naive localtime - finished_at: date of finish of the job - error: error message if the job failed - result: result of the job @@ -224,6 +224,14 @@ class Jobs: return job + @staticmethod + def set_expiration(job: Job, expiration_seconds: int) -> Job: + redis = 
RedisPool().get_connection() + key = _redis_key_from_uuid(job.uid) + if redis.exists(key): + redis.expire(key, expiration_seconds) + return job + @staticmethod def get_job(uid: str) -> typing.Optional[Job]: """ diff --git a/selfprivacy_api/migrations/__init__.py b/selfprivacy_api/migrations/__init__.py index 33472b9..5e05b2d 100644 --- a/selfprivacy_api/migrations/__init__.py +++ b/selfprivacy_api/migrations/__init__.py @@ -8,33 +8,12 @@ at api.skippedMigrations in userdata.json and populating it with IDs of the migrations to skip. Adding DISABLE_ALL to that array disables the migrations module entirely. """ -from selfprivacy_api.migrations.check_for_failed_binds_migration import ( - CheckForFailedBindsMigration, -) -from selfprivacy_api.utils import ReadUserData -from selfprivacy_api.migrations.fix_nixos_config_branch import FixNixosConfigBranch -from selfprivacy_api.migrations.create_tokens_json import CreateTokensJson -from selfprivacy_api.migrations.migrate_to_selfprivacy_channel import ( - MigrateToSelfprivacyChannel, -) -from selfprivacy_api.migrations.mount_volume import MountVolume -from selfprivacy_api.migrations.providers import CreateProviderFields -from selfprivacy_api.migrations.prepare_for_nixos_2211 import ( - MigrateToSelfprivacyChannelFrom2205, -) -from selfprivacy_api.migrations.prepare_for_nixos_2305 import ( - MigrateToSelfprivacyChannelFrom2211, -) + +from selfprivacy_api.utils import ReadUserData, UserDataFiles +from selfprivacy_api.migrations.write_token_to_redis import WriteTokenToRedis migrations = [ - FixNixosConfigBranch(), - CreateTokensJson(), - MigrateToSelfprivacyChannel(), - MountVolume(), - CheckForFailedBindsMigration(), - CreateProviderFields(), - MigrateToSelfprivacyChannelFrom2205(), - MigrateToSelfprivacyChannelFrom2211(), + WriteTokenToRedis(), ] @@ -43,7 +22,7 @@ def run_migrations(): Go over all migrations. If they are not skipped in userdata file, run them if the migration needed. """ - with ReadUserData() as data: + with ReadUserData(UserDataFiles.SECRETS) as data: if "api" not in data: skipped_migrations = [] elif "skippedMigrations" not in data["api"]: diff --git a/selfprivacy_api/migrations/check_for_failed_binds_migration.py b/selfprivacy_api/migrations/check_for_failed_binds_migration.py index 85f73ec..e69de29 100644 --- a/selfprivacy_api/migrations/check_for_failed_binds_migration.py +++ b/selfprivacy_api/migrations/check_for_failed_binds_migration.py @@ -1,48 +0,0 @@ -from selfprivacy_api.jobs import JobStatus, Jobs - -from selfprivacy_api.migrations.migration import Migration -from selfprivacy_api.utils import WriteUserData - - -class CheckForFailedBindsMigration(Migration): - """Mount volume.""" - - def get_migration_name(self): - return "check_for_failed_binds_migration" - - def get_migration_description(self): - return "If binds migration failed, try again." 
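The `Jobs.set_expiration` helper added above is a thin wrapper over Redis key TTLs. A minimal usage sketch, assuming a reachable Redis instance (as in the devshell):

```python
from selfprivacy_api.jobs import Jobs

# Expire every stored job record an hour from now; once the TTL fires,
# the backing Redis hash disappears and Jobs.get_job(uid) returns None.
for job in Jobs.get_jobs():
    Jobs.set_expiration(job, 3600)
```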
- - def is_migration_needed(self): - try: - jobs = Jobs.get_jobs() - # If there is a job with type_id "migrations.migrate_to_binds" and status is not "FINISHED", - # then migration is needed and job is deleted - for job in jobs: - if ( - job.type_id == "migrations.migrate_to_binds" - and job.status != JobStatus.FINISHED - ): - return True - return False - except Exception as error: - print(error) - return False - - def migrate(self): - # Get info about existing volumes - # Write info about volumes to userdata.json - try: - jobs = Jobs.get_jobs() - for job in jobs: - if ( - job.type_id == "migrations.migrate_to_binds" - and job.status != JobStatus.FINISHED - ): - Jobs.remove(job) - with WriteUserData() as userdata: - userdata["useBinds"] = False - print("Done") - except Exception as error: - print(error) - print("Error mounting volume") diff --git a/selfprivacy_api/migrations/create_tokens_json.py b/selfprivacy_api/migrations/create_tokens_json.py deleted file mode 100644 index 38702f8..0000000 --- a/selfprivacy_api/migrations/create_tokens_json.py +++ /dev/null @@ -1,58 +0,0 @@ -from datetime import datetime -import os -import json -from pathlib import Path - -from selfprivacy_api.migrations.migration import Migration -from selfprivacy_api.utils import TOKENS_FILE, ReadUserData - - -class CreateTokensJson(Migration): - def get_migration_name(self): - return "create_tokens_json" - - def get_migration_description(self): - return """Selfprivacy API used a single token in userdata.json for authentication. - This migration creates a new tokens.json file with the old token in it. - This migration runs if the tokens.json file does not exist. - Old token is located at ["api"]["token"] in userdata.json. - tokens.json path is declared in TOKENS_FILE imported from utils.py - tokens.json must have the following format: - { - "tokens": [ - { - "token": "token_string", - "name": "Master Token", - "date": "current date from str(datetime.now())", - } - ] - } - tokens.json must have 0600 permissions. - """ - - def is_migration_needed(self): - return not os.path.exists(TOKENS_FILE) - - def migrate(self): - try: - print(f"Creating tokens.json file at {TOKENS_FILE}") - with ReadUserData() as userdata: - token = userdata["api"]["token"] - # Touch tokens.json with 0600 permissions - Path(TOKENS_FILE).touch(mode=0o600) - # Write token to tokens.json - structure = { - "tokens": [ - { - "token": token, - "name": "primary_token", - "date": str(datetime.now()), - } - ] - } - with open(TOKENS_FILE, "w", encoding="utf-8") as tokens: - json.dump(structure, tokens, indent=4) - print("Done") - except Exception as e: - print(e) - print("Error creating tokens.json") diff --git a/selfprivacy_api/migrations/fix_nixos_config_branch.py b/selfprivacy_api/migrations/fix_nixos_config_branch.py deleted file mode 100644 index fbb994c..0000000 --- a/selfprivacy_api/migrations/fix_nixos_config_branch.py +++ /dev/null @@ -1,57 +0,0 @@ -import os -import subprocess - -from selfprivacy_api.migrations.migration import Migration - - -class FixNixosConfigBranch(Migration): - def get_migration_name(self): - return "fix_nixos_config_branch" - - def get_migration_description(self): - return """Mobile SelfPrivacy app introduced a bug in version 0.4.0. - New servers were initialized with a rolling-testing nixos config branch. - This was fixed in app version 0.4.2, but existing servers were not updated. - This migration fixes this by changing the nixos config branch to master. 
- """ - - def is_migration_needed(self): - """Check the current branch of /etc/nixos and return True if it is rolling-testing""" - current_working_directory = os.getcwd() - try: - os.chdir("/etc/nixos") - nixos_config_branch = subprocess.check_output( - ["git", "rev-parse", "--abbrev-ref", "HEAD"], start_new_session=True - ) - os.chdir(current_working_directory) - return nixos_config_branch.decode("utf-8").strip() == "rolling-testing" - except subprocess.CalledProcessError: - os.chdir(current_working_directory) - return False - - def migrate(self): - """Affected server pulled the config with the --single-branch flag. - Git config remote.origin.fetch has to be changed, so all branches will be fetched. - Then, fetch all branches, pull and switch to master branch. - """ - print("Fixing Nixos config branch") - current_working_directory = os.getcwd() - try: - os.chdir("/etc/nixos") - - subprocess.check_output( - [ - "git", - "config", - "remote.origin.fetch", - "+refs/heads/*:refs/remotes/origin/*", - ] - ) - subprocess.check_output(["git", "fetch", "--all"]) - subprocess.check_output(["git", "pull"]) - subprocess.check_output(["git", "checkout", "master"]) - os.chdir(current_working_directory) - print("Done") - except subprocess.CalledProcessError: - os.chdir(current_working_directory) - print("Error") diff --git a/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py b/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py deleted file mode 100644 index 9bfd670..0000000 --- a/selfprivacy_api/migrations/migrate_to_selfprivacy_channel.py +++ /dev/null @@ -1,49 +0,0 @@ -import os -import subprocess - -from selfprivacy_api.migrations.migration import Migration - - -class MigrateToSelfprivacyChannel(Migration): - """Migrate to selfprivacy Nix channel.""" - - def get_migration_name(self): - return "migrate_to_selfprivacy_channel" - - def get_migration_description(self): - return "Migrate to selfprivacy Nix channel." - - def is_migration_needed(self): - try: - output = subprocess.check_output( - ["nix-channel", "--list"], start_new_session=True - ) - output = output.decode("utf-8") - first_line = output.split("\n", maxsplit=1)[0] - return first_line.startswith("nixos") and ( - first_line.endswith("nixos-21.11") or first_line.endswith("nixos-21.05") - ) - except subprocess.CalledProcessError: - return False - - def migrate(self): - # Change the channel and update them. 
- # Also, go to /etc/nixos directory and make a git pull - current_working_directory = os.getcwd() - try: - print("Changing channel") - os.chdir("/etc/nixos") - subprocess.check_output( - [ - "nix-channel", - "--add", - "https://channel.selfprivacy.org/nixos-selfpricacy", - "nixos", - ] - ) - subprocess.check_output(["nix-channel", "--update"]) - subprocess.check_output(["git", "pull"]) - os.chdir(current_working_directory) - except subprocess.CalledProcessError: - os.chdir(current_working_directory) - print("Error") diff --git a/selfprivacy_api/migrations/mount_volume.py b/selfprivacy_api/migrations/mount_volume.py deleted file mode 100644 index 27fba83..0000000 --- a/selfprivacy_api/migrations/mount_volume.py +++ /dev/null @@ -1,51 +0,0 @@ -import os -import subprocess - -from selfprivacy_api.migrations.migration import Migration -from selfprivacy_api.utils import ReadUserData, WriteUserData -from selfprivacy_api.utils.block_devices import BlockDevices - - -class MountVolume(Migration): - """Mount volume.""" - - def get_migration_name(self): - return "mount_volume" - - def get_migration_description(self): - return "Mount volume if it is not mounted." - - def is_migration_needed(self): - try: - with ReadUserData() as userdata: - return "volumes" not in userdata - except Exception as e: - print(e) - return False - - def migrate(self): - # Get info about existing volumes - # Write info about volumes to userdata.json - try: - volumes = BlockDevices().get_block_devices() - # If there is an unmounted volume sdb, - # Write it to userdata.json - is_there_a_volume = False - for volume in volumes: - if volume.name == "sdb": - is_there_a_volume = True - break - with WriteUserData() as userdata: - userdata["volumes"] = [] - if is_there_a_volume: - userdata["volumes"].append( - { - "device": "/dev/sdb", - "mountPoint": "/volumes/sdb", - "fsType": "ext4", - } - ) - print("Done") - except Exception as e: - print(e) - print("Error mounting volume") diff --git a/selfprivacy_api/migrations/prepare_for_nixos_2211.py b/selfprivacy_api/migrations/prepare_for_nixos_2211.py deleted file mode 100644 index 849c262..0000000 --- a/selfprivacy_api/migrations/prepare_for_nixos_2211.py +++ /dev/null @@ -1,58 +0,0 @@ -import os -import subprocess - -from selfprivacy_api.migrations.migration import Migration - - -class MigrateToSelfprivacyChannelFrom2205(Migration): - """Migrate to selfprivacy Nix channel. - For some reason NixOS 22.05 servers initialized with the nixos channel instead of selfprivacy. - This stops us from upgrading to NixOS 22.11 - """ - - def get_migration_name(self): - return "migrate_to_selfprivacy_channel_from_2205" - - def get_migration_description(self): - return "Migrate to selfprivacy Nix channel from NixOS 22.05." - - def is_migration_needed(self): - try: - output = subprocess.check_output( - ["nix-channel", "--list"], start_new_session=True - ) - output = output.decode("utf-8") - first_line = output.split("\n", maxsplit=1)[0] - return first_line.startswith("nixos") and ( - first_line.endswith("nixos-22.05") - ) - except subprocess.CalledProcessError: - return False - - def migrate(self): - # Change the channel and update them. 
- # Also, go to /etc/nixos directory and make a git pull - current_working_directory = os.getcwd() - try: - print("Changing channel") - os.chdir("/etc/nixos") - subprocess.check_output( - [ - "nix-channel", - "--add", - "https://channel.selfprivacy.org/nixos-selfpricacy", - "nixos", - ] - ) - subprocess.check_output(["nix-channel", "--update"]) - nixos_config_branch = subprocess.check_output( - ["git", "rev-parse", "--abbrev-ref", "HEAD"], start_new_session=True - ) - if nixos_config_branch.decode("utf-8").strip() == "api-redis": - print("Also changing nixos-config branch from api-redis to master") - subprocess.check_output(["git", "checkout", "master"]) - subprocess.check_output(["git", "pull"]) - os.chdir(current_working_directory) - except subprocess.CalledProcessError: - os.chdir(current_working_directory) - print("Error") diff --git a/selfprivacy_api/migrations/prepare_for_nixos_2305.py b/selfprivacy_api/migrations/prepare_for_nixos_2305.py deleted file mode 100644 index d9fed28..0000000 --- a/selfprivacy_api/migrations/prepare_for_nixos_2305.py +++ /dev/null @@ -1,58 +0,0 @@ -import os -import subprocess - -from selfprivacy_api.migrations.migration import Migration - - -class MigrateToSelfprivacyChannelFrom2211(Migration): - """Migrate to selfprivacy Nix channel. - For some reason NixOS 22.11 servers initialized with the nixos channel instead of selfprivacy. - This stops us from upgrading to NixOS 23.05 - """ - - def get_migration_name(self): - return "migrate_to_selfprivacy_channel_from_2211" - - def get_migration_description(self): - return "Migrate to selfprivacy Nix channel from NixOS 22.11." - - def is_migration_needed(self): - try: - output = subprocess.check_output( - ["nix-channel", "--list"], start_new_session=True - ) - output = output.decode("utf-8") - first_line = output.split("\n", maxsplit=1)[0] - return first_line.startswith("nixos") and ( - first_line.endswith("nixos-22.11") - ) - except subprocess.CalledProcessError: - return False - - def migrate(self): - # Change the channel and update them. 
- # Also, go to /etc/nixos directory and make a git pull - current_working_directory = os.getcwd() - try: - print("Changing channel") - os.chdir("/etc/nixos") - subprocess.check_output( - [ - "nix-channel", - "--add", - "https://channel.selfprivacy.org/nixos-selfpricacy", - "nixos", - ] - ) - subprocess.check_output(["nix-channel", "--update"]) - nixos_config_branch = subprocess.check_output( - ["git", "rev-parse", "--abbrev-ref", "HEAD"], start_new_session=True - ) - if nixos_config_branch.decode("utf-8").strip() == "api-redis": - print("Also changing nixos-config branch from api-redis to master") - subprocess.check_output(["git", "checkout", "master"]) - subprocess.check_output(["git", "pull"]) - os.chdir(current_working_directory) - except subprocess.CalledProcessError: - os.chdir(current_working_directory) - print("Error") diff --git a/selfprivacy_api/migrations/providers.py b/selfprivacy_api/migrations/providers.py deleted file mode 100644 index 2cd5d5e..0000000 --- a/selfprivacy_api/migrations/providers.py +++ /dev/null @@ -1,43 +0,0 @@ -from selfprivacy_api.migrations.migration import Migration -from selfprivacy_api.utils import ReadUserData, WriteUserData - - -class CreateProviderFields(Migration): - """Unhardcode providers""" - - def get_migration_name(self): - return "create_provider_fields" - - def get_migration_description(self): - return "Add DNS, backup and server provider fields to enable user to choose between different clouds and to make the deployment adapt to these preferences." - - def is_migration_needed(self): - try: - with ReadUserData() as userdata: - return "dns" not in userdata - except Exception as e: - print(e) - return False - - def migrate(self): - # Write info about providers to userdata.json - try: - with WriteUserData() as userdata: - userdata["dns"] = { - "provider": "CLOUDFLARE", - "apiKey": userdata["cloudflare"]["apiKey"], - } - userdata["server"] = { - "provider": "HETZNER", - } - userdata["backup"] = { - "provider": "BACKBLAZE", - "accountId": userdata["backblaze"]["accountId"], - "accountKey": userdata["backblaze"]["accountKey"], - "bucket": userdata["backblaze"]["bucket"], - } - - print("Done") - except Exception as e: - print(e) - print("Error migrating provider fields") diff --git a/selfprivacy_api/migrations/write_token_to_redis.py b/selfprivacy_api/migrations/write_token_to_redis.py new file mode 100644 index 0000000..aab4f72 --- /dev/null +++ b/selfprivacy_api/migrations/write_token_to_redis.py @@ -0,0 +1,63 @@ +from datetime import datetime +from typing import Optional +from selfprivacy_api.migrations.migration import Migration +from selfprivacy_api.models.tokens.token import Token + +from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( + RedisTokensRepository, +) +from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( + AbstractTokensRepository, +) +from selfprivacy_api.utils import ReadUserData, UserDataFiles + + +class WriteTokenToRedis(Migration): + """Load Json tokens into Redis""" + + def get_migration_name(self): + return "write_token_to_redis" + + def get_migration_description(self): + return "Loads the initial token into redis token storage" + + def is_repo_empty(self, repo: AbstractTokensRepository) -> bool: + if repo.get_tokens() != []: + return False + return True + + def get_token_from_json(self) -> Optional[Token]: + try: + with ReadUserData(UserDataFiles.SECRETS) as userdata: + return Token( + token=userdata["api"]["token"], + device_name="Initial device", + created_at=datetime.now(), + 
) + except Exception as e: + print(e) + return None + + def is_migration_needed(self): + try: + if self.get_token_from_json() is not None and self.is_repo_empty( + RedisTokensRepository() + ): + return True + except Exception as e: + print(e) + return False + + def migrate(self): + # Write the token from secrets.json into the Redis token repository + try: + token = self.get_token_from_json() + if token is None: + print("No token found in secrets.json") + return + RedisTokensRepository()._store_token(token) + + print("Done") + except Exception as e: + print(e) + print("Error migrating access tokens from json to redis") diff --git a/selfprivacy_api/models/backup/snapshot.py b/selfprivacy_api/models/backup/snapshot.py index 9893f03..b2831e7 100644 --- a/selfprivacy_api/models/backup/snapshot.py +++ b/selfprivacy_api/models/backup/snapshot.py @@ -1,8 +1,11 @@ import datetime from pydantic import BaseModel +from selfprivacy_api.graphql.common_types.backup import BackupReason + class Snapshot(BaseModel): id: str service_name: str created_at: datetime.datetime + reason: BackupReason = BackupReason.EXPLICIT diff --git a/selfprivacy_api/models/tokens/new_device_key.py b/selfprivacy_api/models/tokens/new_device_key.py index dda926c..241cbd3 100644 --- a/selfprivacy_api/models/tokens/new_device_key.py +++ b/selfprivacy_api/models/tokens/new_device_key.py @@ -1,11 +1,13 @@ """ New device key used to obtain access token. """ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import secrets from pydantic import BaseModel from mnemonic import Mnemonic +from selfprivacy_api.models.tokens.time import is_past + class NewDeviceKey(BaseModel): """ @@ -20,15 +22,15 @@ class NewDeviceKey(BaseModel): def is_valid(self) -> bool: """ - Check if the recovery key is valid. + Check if key is valid. """ - if self.expires_at < datetime.now(): + if is_past(self.expires_at): return False return True def as_mnemonic(self) -> str: """ - Get the recovery key as a mnemonic. + Get the key as a mnemonic. """ return Mnemonic(language="english").to_mnemonic(bytes.fromhex(self.key)) @@ -37,10 +39,10 @@ class NewDeviceKey(BaseModel): """ Factory to generate a random token. """ - creation_date = datetime.now() + creation_date = datetime.now(timezone.utc) key = secrets.token_bytes(16).hex() return NewDeviceKey( key=key, created_at=creation_date, - expires_at=datetime.now() + timedelta(minutes=10), + expires_at=creation_date + timedelta(minutes=10), ) diff --git a/selfprivacy_api/models/tokens/recovery_key.py b/selfprivacy_api/models/tokens/recovery_key.py index 098aceb..3f52735 100644 --- a/selfprivacy_api/models/tokens/recovery_key.py +++ b/selfprivacy_api/models/tokens/recovery_key.py @@ -3,12 +3,14 @@ Recovery key used to obtain access token. Recovery key has a token string, date of creation, optional date of expiration and optional count of uses left. """ -from datetime import datetime +from datetime import datetime, timezone import secrets from typing import Optional from pydantic import BaseModel from mnemonic import Mnemonic +from selfprivacy_api.models.tokens.time import is_past, ensure_timezone + class RecoveryKey(BaseModel): """ @@ -26,7 +28,7 @@ class RecoveryKey(BaseModel): """ Check if the recovery key is valid. 
""" - if self.expires_at is not None and self.expires_at < datetime.now(): + if self.expires_at is not None and is_past(self.expires_at): return False if self.uses_left is not None and self.uses_left <= 0: return False @@ -45,8 +47,11 @@ class RecoveryKey(BaseModel): ) -> "RecoveryKey": """ Factory to generate a random token. + If passed naive time as expiration, assumes utc """ - creation_date = datetime.now() + creation_date = datetime.now(timezone.utc) + if expiration is not None: + expiration = ensure_timezone(expiration) key = secrets.token_bytes(24).hex() return RecoveryKey( key=key, diff --git a/selfprivacy_api/models/tokens/time.py b/selfprivacy_api/models/tokens/time.py new file mode 100644 index 0000000..967fcfb --- /dev/null +++ b/selfprivacy_api/models/tokens/time.py @@ -0,0 +1,14 @@ +from datetime import datetime, timezone + + +def is_past(dt: datetime) -> bool: + # we cannot compare a naive now() + # to dt which might be tz-aware or unaware + dt = ensure_timezone(dt) + return dt < datetime.now(timezone.utc) + + +def ensure_timezone(dt: datetime) -> datetime: + if dt.tzinfo is None or dt.tzinfo.utcoffset(None) is None: + dt = dt.replace(tzinfo=timezone.utc) + return dt diff --git a/selfprivacy_api/repositories/tokens/__init__.py b/selfprivacy_api/repositories/tokens/__init__.py index 9941bdc..e69de29 100644 --- a/selfprivacy_api/repositories/tokens/__init__.py +++ b/selfprivacy_api/repositories/tokens/__init__.py @@ -1,8 +0,0 @@ -from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( - AbstractTokensRepository, -) -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) - -repository = JsonTokensRepository() diff --git a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py index 3a20ede..d81bd65 100644 --- a/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/abstract_tokens_repository.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from abc import ABC, abstractmethod from datetime import datetime from typing import Optional @@ -86,13 +88,15 @@ class AbstractTokensRepository(ABC): def get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" - @abstractmethod def create_recovery_key( self, expiration: Optional[datetime], uses_left: Optional[int], ) -> RecoveryKey: """Create the recovery key""" + recovery_key = RecoveryKey.generate(expiration, uses_left) + self._store_recovery_key(recovery_key) + return recovery_key def use_mnemonic_recovery_key( self, mnemonic_phrase: str, device_name: str @@ -123,6 +127,14 @@ class AbstractTokensRepository(ABC): return False return recovery_key.is_valid() + @abstractmethod + def _store_recovery_key(self, recovery_key: RecoveryKey) -> None: + """Store recovery key directly""" + + @abstractmethod + def _delete_recovery_key(self) -> None: + """Delete the recovery key""" + def get_new_device_key(self) -> NewDeviceKey: """Creates and returns the new device key""" new_device_key = NewDeviceKey.generate() @@ -156,6 +168,26 @@ class AbstractTokensRepository(ABC): return new_token + def reset(self): + for token in self.get_tokens(): + self.delete_token(token) + self.delete_new_device_key() + self._delete_recovery_key() + + def clone(self, source: AbstractTokensRepository) -> None: + """Clone the state of another repository to this one""" + self.reset() + for token in source.get_tokens(): + self._store_token(token) + + 
recovery_key = source.get_recovery_key() + if recovery_key is not None: + self._store_recovery_key(recovery_key) + + new_device_key = source._get_stored_new_device_key() + if new_device_key is not None: + self._store_new_device_key(new_device_key) + @abstractmethod def _store_token(self, new_token: Token): """Store a token directly""" diff --git a/selfprivacy_api/repositories/tokens/json_tokens_repository.py b/selfprivacy_api/repositories/tokens/json_tokens_repository.py deleted file mode 100644 index 77e1311..0000000 --- a/selfprivacy_api/repositories/tokens/json_tokens_repository.py +++ /dev/null @@ -1,133 +0,0 @@ -""" -temporary legacy -""" -from typing import Optional -from datetime import datetime - -from selfprivacy_api.utils import UserDataFiles, WriteUserData, ReadUserData -from selfprivacy_api.models.tokens.token import Token -from selfprivacy_api.models.tokens.recovery_key import RecoveryKey -from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey -from selfprivacy_api.repositories.tokens.exceptions import ( - TokenNotFound, -) -from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( - AbstractTokensRepository, -) - -DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" - - -class JsonTokensRepository(AbstractTokensRepository): - def get_tokens(self) -> list[Token]: - """Get the tokens""" - tokens_list = [] - - with ReadUserData(UserDataFiles.TOKENS) as tokens_file: - for userdata_token in tokens_file["tokens"]: - tokens_list.append( - Token( - token=userdata_token["token"], - device_name=userdata_token["name"], - created_at=userdata_token["date"], - ) - ) - - return tokens_list - - def _store_token(self, new_token: Token): - """Store a token directly""" - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - tokens_file["tokens"].append( - { - "token": new_token.token, - "name": new_token.device_name, - "date": new_token.created_at.strftime(DATETIME_FORMAT), - } - ) - - def delete_token(self, input_token: Token) -> None: - """Delete the token""" - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - for userdata_token in tokens_file["tokens"]: - if userdata_token["token"] == input_token.token: - tokens_file["tokens"].remove(userdata_token) - return - - raise TokenNotFound("Token not found!") - - def get_recovery_key(self) -> Optional[RecoveryKey]: - """Get the recovery key""" - with ReadUserData(UserDataFiles.TOKENS) as tokens_file: - - if ( - "recovery_token" not in tokens_file - or tokens_file["recovery_token"] is None - ): - return - - recovery_key = RecoveryKey( - key=tokens_file["recovery_token"].get("token"), - created_at=tokens_file["recovery_token"].get("date"), - expires_at=tokens_file["recovery_token"].get("expiration"), - uses_left=tokens_file["recovery_token"].get("uses_left"), - ) - - return recovery_key - - def create_recovery_key( - self, - expiration: Optional[datetime], - uses_left: Optional[int], - ) -> RecoveryKey: - """Create the recovery key""" - - recovery_key = RecoveryKey.generate(expiration, uses_left) - - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - key_expiration: Optional[str] = None - if recovery_key.expires_at is not None: - key_expiration = recovery_key.expires_at.strftime(DATETIME_FORMAT) - tokens_file["recovery_token"] = { - "token": recovery_key.key, - "date": recovery_key.created_at.strftime(DATETIME_FORMAT), - "expiration": key_expiration, - "uses_left": recovery_key.uses_left, - } - - return recovery_key - - def _decrement_recovery_token(self): - """Decrement recovery key use count by one""" - if 
self.is_recovery_key_valid(): - with WriteUserData(UserDataFiles.TOKENS) as tokens: - if tokens["recovery_token"]["uses_left"] is not None: - tokens["recovery_token"]["uses_left"] -= 1 - - def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None: - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - tokens_file["new_device"] = { - "token": new_device_key.key, - "date": new_device_key.created_at.strftime(DATETIME_FORMAT), - "expiration": new_device_key.expires_at.strftime(DATETIME_FORMAT), - } - - def delete_new_device_key(self) -> None: - """Delete the new device key""" - with WriteUserData(UserDataFiles.TOKENS) as tokens_file: - if "new_device" in tokens_file: - del tokens_file["new_device"] - return - - def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]: - """Retrieves new device key that is already stored.""" - with ReadUserData(UserDataFiles.TOKENS) as tokens_file: - if "new_device" not in tokens_file or tokens_file["new_device"] is None: - return - - new_device_key = NewDeviceKey( - key=tokens_file["new_device"]["token"], - created_at=tokens_file["new_device"]["date"], - expires_at=tokens_file["new_device"]["expiration"], - ) - return new_device_key diff --git a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py index 8e683d2..834794c 100644 --- a/selfprivacy_api/repositories/tokens/redis_tokens_repository.py +++ b/selfprivacy_api/repositories/tokens/redis_tokens_repository.py @@ -4,6 +4,7 @@ Token repository using Redis as backend. from typing import Any, Optional from datetime import datetime from hashlib import md5 +from datetime import timezone from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( AbstractTokensRepository, @@ -53,6 +54,7 @@ class RedisTokensRepository(AbstractTokensRepository): token = self._token_from_hash(key) if token == input_token: return key + return None def delete_token(self, input_token: Token) -> None: """Delete the token""" @@ -62,13 +64,6 @@ class RedisTokensRepository(AbstractTokensRepository): raise TokenNotFound redis.delete(key) - def reset(self): - for token in self.get_tokens(): - self.delete_token(token) - self.delete_new_device_key() - redis = self.connection - redis.delete(RECOVERY_KEY_REDIS_KEY) - def get_recovery_key(self) -> Optional[RecoveryKey]: """Get the recovery key""" redis = self.connection @@ -76,15 +71,13 @@ class RedisTokensRepository(AbstractTokensRepository): return self._recovery_key_from_hash(RECOVERY_KEY_REDIS_KEY) return None - def create_recovery_key( - self, - expiration: Optional[datetime], - uses_left: Optional[int], - ) -> RecoveryKey: - """Create the recovery key""" - recovery_key = RecoveryKey.generate(expiration=expiration, uses_left=uses_left) + def _store_recovery_key(self, recovery_key: RecoveryKey) -> None: self._store_model_as_hash(RECOVERY_KEY_REDIS_KEY, recovery_key) - return recovery_key + + def _delete_recovery_key(self) -> None: + """Delete the recovery key""" + redis = self.connection + redis.delete(RECOVERY_KEY_REDIS_KEY) def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None: """Store new device key directly""" @@ -157,6 +150,7 @@ class RedisTokensRepository(AbstractTokensRepository): if token is not None: token.created_at = token.created_at.replace(tzinfo=None) return token + return None def _recovery_key_from_hash(self, redis_key: str) -> Optional[RecoveryKey]: return self._hash_as_model(redis_key, RecoveryKey) @@ -168,5 +162,7 @@ class 
RedisTokensRepository(AbstractTokensRepository): redis = self.connection for key, value in model.dict().items(): if isinstance(value, datetime): + if value.tzinfo is None: + value = value.replace(tzinfo=timezone.utc) value = value.isoformat() redis.hset(redis_key, key, str(value)) diff --git a/selfprivacy_api/rest/api_auth.py b/selfprivacy_api/rest/api_auth.py deleted file mode 100644 index 275dac3..0000000 --- a/selfprivacy_api/rest/api_auth.py +++ /dev/null @@ -1,125 +0,0 @@ -from datetime import datetime -from typing import Optional -from fastapi import APIRouter, Depends, HTTPException -from pydantic import BaseModel -from selfprivacy_api.actions.api_tokens import ( - CannotDeleteCallerException, - InvalidExpirationDate, - InvalidUsesLeft, - NotFoundException, - delete_api_token, - refresh_api_token, - get_api_recovery_token_status, - get_api_tokens_with_caller_flag, - get_new_api_recovery_key, - use_mnemonic_recovery_token, - delete_new_device_auth_token, - get_new_device_auth_token, - use_new_device_auth_token, -) - -from selfprivacy_api.dependencies import TokenHeader, get_token_header - - -router = APIRouter( - prefix="/auth", - tags=["auth"], - responses={404: {"description": "Not found"}}, -) - - -@router.get("/tokens") -async def rest_get_tokens(auth_token: TokenHeader = Depends(get_token_header)): - """Get the tokens info""" - return get_api_tokens_with_caller_flag(auth_token.token) - - -class DeleteTokenInput(BaseModel): - """Delete token input""" - - token_name: str - - -@router.delete("/tokens") -async def rest_delete_tokens( - token: DeleteTokenInput, auth_token: TokenHeader = Depends(get_token_header) -): - """Delete the tokens""" - try: - delete_api_token(auth_token.token, token.token_name) - except NotFoundException: - raise HTTPException(status_code=404, detail="Token not found") - except CannotDeleteCallerException: - raise HTTPException(status_code=400, detail="Cannot delete caller's token") - return {"message": "Token deleted"} - - -@router.post("/tokens") -async def rest_refresh_token(auth_token: TokenHeader = Depends(get_token_header)): - """Refresh the token""" - try: - new_token = refresh_api_token(auth_token.token) - except NotFoundException: - raise HTTPException(status_code=404, detail="Token not found") - return {"token": new_token} - - -@router.get("/recovery_token") -async def rest_get_recovery_token_status( - auth_token: TokenHeader = Depends(get_token_header), -): - return get_api_recovery_token_status() - - -class CreateRecoveryTokenInput(BaseModel): - expiration: Optional[datetime] = None - uses: Optional[int] = None - - -@router.post("/recovery_token") -async def rest_create_recovery_token( - limits: CreateRecoveryTokenInput = CreateRecoveryTokenInput(), - auth_token: TokenHeader = Depends(get_token_header), -): - try: - token = get_new_api_recovery_key(limits.expiration, limits.uses) - except InvalidExpirationDate as e: - raise HTTPException(status_code=400, detail=str(e)) - except InvalidUsesLeft as e: - raise HTTPException(status_code=400, detail=str(e)) - return {"token": token} - - -class UseTokenInput(BaseModel): - token: str - device: str - - -@router.post("/recovery_token/use") -async def rest_use_recovery_token(input: UseTokenInput): - token = use_mnemonic_recovery_token(input.token, input.device) - if token is None: - raise HTTPException(status_code=404, detail="Token not found") - return {"token": token} - - -@router.post("/new_device") -async def rest_new_device(auth_token: TokenHeader = Depends(get_token_header)): - token = 
get_new_device_auth_token() - return {"token": token} - - -@router.delete("/new_device") -async def rest_delete_new_device_token( - auth_token: TokenHeader = Depends(get_token_header), -): - delete_new_device_auth_token() - return {"token": None} - - -@router.post("/new_device/authorize") -async def rest_new_device_authorize(input: UseTokenInput): - token = use_new_device_auth_token(input.token, input.device) - if token is None: - raise HTTPException(status_code=404, detail="Token not found") - return {"message": "Device authorized", "token": token} diff --git a/selfprivacy_api/rest/services.py b/selfprivacy_api/rest/services.py deleted file mode 100644 index c6dc12e..0000000 --- a/selfprivacy_api/rest/services.py +++ /dev/null @@ -1,336 +0,0 @@ -"""Basic services legacy api""" -import base64 -from typing import Optional -from fastapi import APIRouter, Depends, HTTPException -from pydantic import BaseModel -from selfprivacy_api.actions.ssh import ( - InvalidPublicKey, - KeyAlreadyExists, - KeyNotFound, - create_ssh_key, - enable_ssh, - get_ssh_settings, - remove_ssh_key, - set_ssh_settings, -) -from selfprivacy_api.actions.users import UserNotFound, get_user_by_username - -from selfprivacy_api.dependencies import get_token_header -from selfprivacy_api.services.bitwarden import Bitwarden -from selfprivacy_api.services.gitea import Gitea -from selfprivacy_api.services.mailserver import MailServer -from selfprivacy_api.services.nextcloud import Nextcloud -from selfprivacy_api.services.ocserv import Ocserv -from selfprivacy_api.services.pleroma import Pleroma -from selfprivacy_api.services.service import ServiceStatus -from selfprivacy_api.utils import get_dkim_key, get_domain - -router = APIRouter( - prefix="/services", - tags=["services"], - dependencies=[Depends(get_token_header)], - responses={404: {"description": "Not found"}}, -) - - -def service_status_to_return_code(status: ServiceStatus): - """Converts service status object to return code for - compatibility with legacy api""" - if status == ServiceStatus.ACTIVE: - return 0 - elif status == ServiceStatus.FAILED: - return 1 - elif status == ServiceStatus.INACTIVE: - return 3 - elif status == ServiceStatus.OFF: - return 4 - else: - return 2 - - -@router.get("/status") -async def get_status(): - """Get the status of the services""" - mail_status = MailServer.get_status() - bitwarden_status = Bitwarden.get_status() - gitea_status = Gitea.get_status() - nextcloud_status = Nextcloud.get_status() - ocserv_stauts = Ocserv.get_status() - pleroma_status = Pleroma.get_status() - - return { - "imap": service_status_to_return_code(mail_status), - "smtp": service_status_to_return_code(mail_status), - "http": 0, - "bitwarden": service_status_to_return_code(bitwarden_status), - "gitea": service_status_to_return_code(gitea_status), - "nextcloud": service_status_to_return_code(nextcloud_status), - "ocserv": service_status_to_return_code(ocserv_stauts), - "pleroma": service_status_to_return_code(pleroma_status), - } - - -@router.post("/bitwarden/enable") -async def enable_bitwarden(): - """Enable Bitwarden""" - Bitwarden.enable() - return { - "status": 0, - "message": "Bitwarden enabled", - } - - -@router.post("/bitwarden/disable") -async def disable_bitwarden(): - """Disable Bitwarden""" - Bitwarden.disable() - return { - "status": 0, - "message": "Bitwarden disabled", - } - - -@router.post("/gitea/enable") -async def enable_gitea(): - """Enable Gitea""" - Gitea.enable() - return { - "status": 0, - "message": "Gitea enabled", - } - - 
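For reference, the `service_status_to_return_code` helper deleted above translated `ServiceStatus` values into the integer codes the legacy `/services/status` endpoint reported; as a digest:

```python
from selfprivacy_api.services.service import ServiceStatus

# Codes returned by the removed REST endpoint, per the deleted helper.
LEGACY_RETURN_CODES = {
    ServiceStatus.ACTIVE: 0,
    ServiceStatus.FAILED: 1,
    ServiceStatus.INACTIVE: 3,
    ServiceStatus.OFF: 4,
    # any other status mapped to 2
}
```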
-@router.post("/gitea/disable") -async def disable_gitea(): - """Disable Gitea""" - Gitea.disable() - return { - "status": 0, - "message": "Gitea disabled", - } - - -@router.get("/mailserver/dkim") -async def get_mailserver_dkim(): - """Get the DKIM record for the mailserver""" - domain = get_domain() - - dkim = get_dkim_key(domain, parse=False) - if dkim is None: - raise HTTPException(status_code=404, detail="DKIM record not found") - dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8") - return dkim - - -@router.post("/nextcloud/enable") -async def enable_nextcloud(): - """Enable Nextcloud""" - Nextcloud.enable() - return { - "status": 0, - "message": "Nextcloud enabled", - } - - -@router.post("/nextcloud/disable") -async def disable_nextcloud(): - """Disable Nextcloud""" - Nextcloud.disable() - return { - "status": 0, - "message": "Nextcloud disabled", - } - - -@router.post("/ocserv/enable") -async def enable_ocserv(): - """Enable Ocserv""" - Ocserv.enable() - return { - "status": 0, - "message": "Ocserv enabled", - } - - -@router.post("/ocserv/disable") -async def disable_ocserv(): - """Disable Ocserv""" - Ocserv.disable() - return { - "status": 0, - "message": "Ocserv disabled", - } - - -@router.post("/pleroma/enable") -async def enable_pleroma(): - """Enable Pleroma""" - Pleroma.enable() - return { - "status": 0, - "message": "Pleroma enabled", - } - - -@router.post("/pleroma/disable") -async def disable_pleroma(): - """Disable Pleroma""" - Pleroma.disable() - return { - "status": 0, - "message": "Pleroma disabled", - } - - -@router.get("/restic/backup/list") -async def get_restic_backup_list(): - raise HTTPException( - status_code=410, - detail="This endpoint is deprecated, please use GraphQL API", - ) - - -@router.put("/restic/backup/create") -async def create_restic_backup(): - raise HTTPException( - status_code=410, - detail="This endpoint is deprecated, please use GraphQL API", - ) - - -@router.get("/restic/backup/status") -async def get_restic_backup_status(): - raise HTTPException( - status_code=410, - detail="This endpoint is deprecated, please use GraphQL API", - ) - - -@router.get("/restic/backup/reload") -async def reload_restic_backup(): - raise HTTPException( - status_code=410, - detail="This endpoint is deprecated, please use GraphQL API", - ) - - -class BackupRestoreInput(BaseModel): - backupId: str - - -@router.put("/restic/backup/restore") -async def restore_restic_backup(backup: BackupRestoreInput): - raise HTTPException( - status_code=410, - detail="This endpoint is deprecated, please use GraphQL API", - ) - - -class BackupConfigInput(BaseModel): - accountId: str - accountKey: str - bucket: str - - -@router.put("/restic/backblaze/config") -async def set_backblaze_config(backup_config: BackupConfigInput): - raise HTTPException( - status_code=410, - detail="This endpoint is deprecated, please use GraphQL API", - ) - - -@router.post("/ssh/enable") -async def rest_enable_ssh(): - """Enable SSH""" - enable_ssh() - return { - "status": 0, - "message": "SSH enabled", - } - - -@router.get("/ssh") -async def rest_get_ssh(): - """Get the SSH configuration""" - settings = get_ssh_settings() - return { - "enable": settings.enable, - "passwordAuthentication": settings.passwordAuthentication, - } - - -class SshConfigInput(BaseModel): - enable: Optional[bool] = None - passwordAuthentication: Optional[bool] = None - - -@router.put("/ssh") -async def rest_set_ssh(ssh_config: SshConfigInput): - """Set the SSH configuration""" - set_ssh_settings(ssh_config.enable, 
ssh_config.passwordAuthentication) - - return "SSH settings changed" - - -class SshKeyInput(BaseModel): - public_key: str - - -@router.put("/ssh/key/send", status_code=201) -async def rest_send_ssh_key(input: SshKeyInput): - """Send the SSH key""" - try: - create_ssh_key("root", input.public_key) - except KeyAlreadyExists as error: - raise HTTPException(status_code=409, detail="Key already exists") from error - except InvalidPublicKey as error: - raise HTTPException( - status_code=400, - detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", - ) from error - - return { - "status": 0, - "message": "SSH key sent", - } - - -@router.get("/ssh/keys/{username}") -async def rest_get_ssh_keys(username: str): - """Get the SSH keys for a user""" - user = get_user_by_username(username) - if user is None: - raise HTTPException(status_code=404, detail="User not found") - - return user.ssh_keys - - -@router.post("/ssh/keys/{username}", status_code=201) -async def rest_add_ssh_key(username: str, input: SshKeyInput): - try: - create_ssh_key(username, input.public_key) - except KeyAlreadyExists as error: - raise HTTPException(status_code=409, detail="Key already exists") from error - except InvalidPublicKey as error: - raise HTTPException( - status_code=400, - detail="Invalid key type. Only ssh-ed25519 and ssh-rsa are supported", - ) from error - except UserNotFound as error: - raise HTTPException(status_code=404, detail="User not found") from error - - return { - "message": "New SSH key successfully written", - } - - -@router.delete("/ssh/keys/{username}") -async def rest_delete_ssh_key(username: str, input: SshKeyInput): - try: - remove_ssh_key(username, input.public_key) - except KeyNotFound as error: - raise HTTPException(status_code=404, detail="Key not found") from error - except UserNotFound as error: - raise HTTPException(status_code=404, detail="User not found") from error - return {"message": "SSH key deleted"} diff --git a/selfprivacy_api/rest/system.py b/selfprivacy_api/rest/system.py deleted file mode 100644 index 9933fb3..0000000 --- a/selfprivacy_api/rest/system.py +++ /dev/null @@ -1,105 +0,0 @@ -from typing import Optional -from fastapi import APIRouter, Body, Depends, HTTPException -from pydantic import BaseModel - -from selfprivacy_api.dependencies import get_token_header - -import selfprivacy_api.actions.system as system_actions - -router = APIRouter( - prefix="/system", - tags=["system"], - dependencies=[Depends(get_token_header)], - responses={404: {"description": "Not found"}}, -) - - -@router.get("/configuration/timezone") -async def get_timezone(): - """Get the timezone of the server""" - return system_actions.get_timezone() - - -class ChangeTimezoneRequestBody(BaseModel): - """Change the timezone of the server""" - - timezone: str - - -@router.put("/configuration/timezone") -async def change_timezone(timezone: ChangeTimezoneRequestBody): - """Change the timezone of the server""" - try: - system_actions.change_timezone(timezone.timezone) - except system_actions.InvalidTimezone as e: - raise HTTPException(status_code=400, detail=str(e)) - return {"timezone": timezone.timezone} - - -@router.get("/configuration/autoUpgrade") -async def get_auto_upgrade_settings(): - """Get the auto-upgrade settings""" - return system_actions.get_auto_upgrade_settings().dict() - - -class AutoUpgradeSettings(BaseModel): - """Settings for auto-upgrading user data""" - - enable: Optional[bool] = None - allowReboot: Optional[bool] = None - - -@router.put("/configuration/autoUpgrade") -async 
def set_auto_upgrade_settings(settings: AutoUpgradeSettings): - """Set the auto-upgrade settings""" - system_actions.set_auto_upgrade_settings(settings.enable, settings.allowReboot) - return "Auto-upgrade settings changed" - - -@router.get("/configuration/apply") -async def apply_configuration(): - """Apply the configuration""" - return_code = system_actions.rebuild_system() - return return_code - - -@router.get("/configuration/rollback") -async def rollback_configuration(): - """Rollback the configuration""" - return_code = system_actions.rollback_system() - return return_code - - -@router.get("/configuration/upgrade") -async def upgrade_configuration(): - """Upgrade the configuration""" - return_code = system_actions.upgrade_system() - return return_code - - -@router.get("/reboot") -async def reboot_system(): - """Reboot the system""" - system_actions.reboot_system() - return "System reboot has started" - - -@router.get("/version") -async def get_system_version(): - """Get the system version""" - return {"system_version": system_actions.get_system_version()} - - -@router.get("/pythonVersion") -async def get_python_version(): - """Get the Python version""" - return system_actions.get_python_version() - - -@router.get("/configuration/pull") -async def pull_configuration(): - """Pull the configuration""" - action_result = system_actions.pull_repository_changes() - if action_result.status == 0: - return action_result.dict() - raise HTTPException(status_code=500, detail=action_result.dict()) diff --git a/selfprivacy_api/rest/users.py b/selfprivacy_api/rest/users.py deleted file mode 100644 index ab4c6c9..0000000 --- a/selfprivacy_api/rest/users.py +++ /dev/null @@ -1,62 +0,0 @@ -"""Users management module""" -from typing import Optional -from fastapi import APIRouter, Body, Depends, HTTPException -from pydantic import BaseModel - -import selfprivacy_api.actions.users as users_actions - -from selfprivacy_api.dependencies import get_token_header - -router = APIRouter( - prefix="/users", - tags=["users"], - dependencies=[Depends(get_token_header)], - responses={404: {"description": "Not found"}}, -) - - -@router.get("") -async def get_users(withMainUser: bool = False): - """Get the list of users""" - users: list[users_actions.UserDataUser] = users_actions.get_users( - exclude_primary=not withMainUser, exclude_root=True - ) - - return [user.username for user in users] - - -class UserInput(BaseModel): - """User input""" - - username: str - password: str - - -@router.post("", status_code=201) -async def create_user(user: UserInput): - try: - users_actions.create_user(user.username, user.password) - except users_actions.PasswordIsEmpty as e: - raise HTTPException(status_code=400, detail=str(e)) - except users_actions.UsernameForbidden as e: - raise HTTPException(status_code=409, detail=str(e)) - except users_actions.UsernameNotAlphanumeric as e: - raise HTTPException(status_code=400, detail=str(e)) - except users_actions.UsernameTooLong as e: - raise HTTPException(status_code=400, detail=str(e)) - except users_actions.UserAlreadyExists as e: - raise HTTPException(status_code=409, detail=str(e)) - - return {"result": 0, "username": user.username} - - -@router.delete("/{username}") -async def delete_user(username: str): - try: - users_actions.delete_user(username) - except users_actions.UserNotFound as e: - raise HTTPException(status_code=404, detail=str(e)) - except users_actions.UserIsProtected as e: - raise HTTPException(status_code=400, detail=str(e)) - - return {"result": 0, "username": username} 
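The REST routes removed above were thin wrappers over the actions layer, which stays in place; a sketch of calling it directly, reusing the exception-to-status mapping the deleted handlers applied (username and password are illustrative):

```python
import selfprivacy_api.actions.users as users_actions

try:
    users_actions.create_user("alice", "correct horse battery staple")
except (
    users_actions.PasswordIsEmpty,
    users_actions.UsernameNotAlphanumeric,
    users_actions.UsernameTooLong,
) as e:
    print(f"400: {e}")  # the deleted handlers returned HTTP 400 for these
except (users_actions.UsernameForbidden, users_actions.UserAlreadyExists) as e:
    print(f"409: {e}")  # and HTTP 409 for these
```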
diff --git a/selfprivacy_api/services/__init__.py b/selfprivacy_api/services/__init__.py index 02bb1d3..dd0a5b4 100644 --- a/selfprivacy_api/services/__init__.py +++ b/selfprivacy_api/services/__init__.py @@ -3,7 +3,7 @@ import typing from selfprivacy_api.services.bitwarden import Bitwarden from selfprivacy_api.services.gitea import Gitea -from selfprivacy_api.services.jitsi import Jitsi +from selfprivacy_api.services.jitsimeet import JitsiMeet from selfprivacy_api.services.mailserver import MailServer from selfprivacy_api.services.nextcloud import Nextcloud from selfprivacy_api.services.pleroma import Pleroma @@ -18,7 +18,7 @@ services: list[Service] = [ Nextcloud(), Pleroma(), Ocserv(), - Jitsi(), + JitsiMeet(), ] @@ -54,12 +54,14 @@ def get_all_required_dns_records() -> list[ServiceDnsRecord]: name="api", content=ip4, ttl=3600, + display_name="SelfPrivacy API", ), ServiceDnsRecord( type="AAAA", name="api", content=ip6, ttl=3600, + display_name="SelfPrivacy API (IPv6)", ), ] for service in get_enabled_services(): diff --git a/selfprivacy_api/services/bitwarden/__init__.py b/selfprivacy_api/services/bitwarden/__init__.py index 2f695fd..1590729 100644 --- a/selfprivacy_api/services/bitwarden/__init__.py +++ b/selfprivacy_api/services/bitwarden/__init__.py @@ -58,11 +58,6 @@ class Bitwarden(Service): def get_backup_description() -> str: return "Password database, encryption certificate and attachments." - @staticmethod - def is_enabled() -> bool: - with ReadUserData() as user_data: - return user_data.get("bitwarden", {}).get("enable", False) - @staticmethod def get_status() -> ServiceStatus: """ @@ -76,22 +71,6 @@ class Bitwarden(Service): """ return get_service_status("vaultwarden.service") - @staticmethod - def enable(): - """Enable Bitwarden service.""" - with WriteUserData() as user_data: - if "bitwarden" not in user_data: - user_data["bitwarden"] = {} - user_data["bitwarden"]["enable"] = True - - @staticmethod - def disable(): - """Disable Bitwarden service.""" - with WriteUserData() as user_data: - if "bitwarden" not in user_data: - user_data["bitwarden"] = {} - user_data["bitwarden"]["enable"] = False - @staticmethod def stop(): subprocess.run(["systemctl", "stop", "vaultwarden.service"]) @@ -129,12 +108,14 @@ class Bitwarden(Service): name="password", content=network_utils.get_ip4(), ttl=3600, + display_name="Bitwarden", ), ServiceDnsRecord( type="AAAA", name="password", content=network_utils.get_ip6(), ttl=3600, + display_name="Bitwarden (IPv6)", ), ] diff --git a/selfprivacy_api/services/generic_service_mover.py b/selfprivacy_api/services/generic_service_mover.py index cfb0385..819b48e 100644 --- a/selfprivacy_api/services/generic_service_mover.py +++ b/selfprivacy_api/services/generic_service_mover.py @@ -244,9 +244,11 @@ def move_service( progress=95, ) with WriteUserData() as user_data: - if userdata_location not in user_data: - user_data[userdata_location] = {} - user_data[userdata_location]["location"] = volume.name + if "modules" not in user_data: + user_data["modules"] = {} + if userdata_location not in user_data["modules"]: + user_data["modules"][userdata_location] = {} + user_data["modules"][userdata_location]["location"] = volume.name # Start service service.start() Jobs.update( diff --git a/selfprivacy_api/services/gitea/__init__.py b/selfprivacy_api/services/gitea/__init__.py index fcb9ca7..9b6f80f 100644 --- a/selfprivacy_api/services/gitea/__init__.py +++ b/selfprivacy_api/services/gitea/__init__.py @@ -54,11 +54,6 @@ class Gitea(Service): def 
get_backup_description() -> str: return "Git repositories, database and user data." - @staticmethod - def is_enabled() -> bool: - with ReadUserData() as user_data: - return user_data.get("gitea", {}).get("enable", False) - @staticmethod def get_status() -> ServiceStatus: """ @@ -71,22 +66,6 @@ class Gitea(Service): """ return get_service_status("gitea.service") - @staticmethod - def enable(): - """Enable Gitea service.""" - with WriteUserData() as user_data: - if "gitea" not in user_data: - user_data["gitea"] = {} - user_data["gitea"]["enable"] = True - - @staticmethod - def disable(): - """Disable Gitea service.""" - with WriteUserData() as user_data: - if "gitea" not in user_data: - user_data["gitea"] = {} - user_data["gitea"]["enable"] = False - @staticmethod def stop(): subprocess.run(["systemctl", "stop", "gitea.service"]) @@ -123,12 +102,14 @@ class Gitea(Service): name="git", content=network_utils.get_ip4(), ttl=3600, + display_name="Gitea", ), ServiceDnsRecord( type="AAAA", name="git", content=network_utils.get_ip6(), ttl=3600, + display_name="Gitea (IPv6)", ), ] diff --git a/selfprivacy_api/services/jitsi/__init__.py b/selfprivacy_api/services/jitsimeet/__init__.py similarity index 76% rename from selfprivacy_api/services/jitsi/__init__.py rename to selfprivacy_api/services/jitsimeet/__init__.py index 2684fc3..30663f9 100644 --- a/selfprivacy_api/services/jitsi/__init__.py +++ b/selfprivacy_api/services/jitsimeet/__init__.py @@ -1,4 +1,4 @@ -"""Class representing Jitsi service""" +"""Class representing Jitsi Meet service""" import base64 import subprocess import typing @@ -11,26 +11,26 @@ from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceS from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain from selfprivacy_api.utils.block_devices import BlockDevice import selfprivacy_api.utils.network as network_utils -from selfprivacy_api.services.jitsi.icon import JITSI_ICON +from selfprivacy_api.services.jitsimeet.icon import JITSI_ICON -class Jitsi(Service): +class JitsiMeet(Service): """Class representing Jitsi service""" @staticmethod def get_id() -> str: """Return service id.""" - return "jitsi" + return "jitsi-meet" @staticmethod def get_display_name() -> str: """Return service display name.""" - return "Jitsi" + return "JitsiMeet" @staticmethod def get_description() -> str: """Return service description.""" - return "Jitsi is a free and open-source video conferencing solution." + return "Jitsi Meet is a free and open-source video conferencing solution." @staticmethod def get_svg_icon() -> str: @@ -55,33 +55,12 @@ class Jitsi(Service): def get_backup_description() -> str: return "Secrets that are used to encrypt the communication." 
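Every DNS record in this diff gains a `display_name` used as a human-readable label; a sketch of one record as services now construct them, mirroring the Gitea records above:

```python
from selfprivacy_api.services.service import ServiceDnsRecord
import selfprivacy_api.utils.network as network_utils

record = ServiceDnsRecord(
    type="A",
    name="git",                # subdomain, as in the Gitea service
    content=network_utils.get_ip4(),
    ttl=3600,
    display_name="Gitea",      # new human-readable label for the UI
)
```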
- @staticmethod - def is_enabled() -> bool: - with ReadUserData() as user_data: - return user_data.get("jitsi", {}).get("enable", False) - @staticmethod def get_status() -> ServiceStatus: return get_service_status_from_several_units( ["jitsi-videobridge.service", "jicofo.service"] ) - @staticmethod - def enable(): - """Enable Jitsi service.""" - with WriteUserData() as user_data: - if "jitsi" not in user_data: - user_data["jitsi"] = {} - user_data["jitsi"]["enable"] = True - - @staticmethod - def disable(): - """Disable Gitea service.""" - with WriteUserData() as user_data: - if "jitsi" not in user_data: - user_data["jitsi"] = {} - user_data["jitsi"]["enable"] = False - @staticmethod def stop(): subprocess.run( @@ -132,14 +111,16 @@ class Jitsi(Service): name="meet", content=ip4, ttl=3600, + display_name="Jitsi", ), ServiceDnsRecord( type="AAAA", name="meet", content=ip6, ttl=3600, + display_name="Jitsi (IPv6)", ), ] def move_to_volume(self, volume: BlockDevice) -> Job: - raise NotImplementedError("jitsi service is not movable") + raise NotImplementedError("jitsi-meet service is not movable") diff --git a/selfprivacy_api/services/jitsi/icon.py b/selfprivacy_api/services/jitsimeet/icon.py similarity index 100% rename from selfprivacy_api/services/jitsi/icon.py rename to selfprivacy_api/services/jitsimeet/icon.py diff --git a/selfprivacy_api/services/mailserver/__init__.py b/selfprivacy_api/services/mailserver/__init__.py index d0f70eb..536b444 100644 --- a/selfprivacy_api/services/mailserver/__init__.py +++ b/selfprivacy_api/services/mailserver/__init__.py @@ -21,7 +21,7 @@ class MailServer(Service): @staticmethod def get_id() -> str: - return "email" + return "simple-nixos-mailserver" @staticmethod def get_display_name() -> str: @@ -121,27 +121,43 @@ class MailServer(Service): name=domain, content=ip4, ttl=3600, + display_name="Root Domain", ), ServiceDnsRecord( type="AAAA", name=domain, content=ip6, ttl=3600, + display_name="Root Domain (IPv6)", ), ServiceDnsRecord( - type="MX", name=domain, content=domain, ttl=3600, priority=10 + type="MX", + name=domain, + content=domain, + ttl=3600, + priority=10, + display_name="Mail server record", ), ServiceDnsRecord( - type="TXT", name="_dmarc", content="v=DMARC1; p=none", ttl=18000 + type="TXT", + name="_dmarc", + content="v=DMARC1; p=none", + ttl=18000, + display_name="DMARC record", ), ServiceDnsRecord( type="TXT", name=domain, content=f"v=spf1 a mx ip4:{ip4} -all", ttl=18000, + display_name="SPF record", ), ServiceDnsRecord( - type="TXT", name="selector._domainkey", content=dkim_record, ttl=18000 + type="TXT", + name="selector._domainkey", + content=dkim_record, + ttl=18000, + display_name="DKIM key", ), ] @@ -157,7 +173,7 @@ class MailServer(Service): volume, job, FolderMoveNames.default_foldermoves(self), - "email", + "simple-nixos-mailserver", ) return job diff --git a/selfprivacy_api/services/nextcloud/__init__.py b/selfprivacy_api/services/nextcloud/__init__.py index 4ac01af..0da6dd9 100644 --- a/selfprivacy_api/services/nextcloud/__init__.py +++ b/selfprivacy_api/services/nextcloud/__init__.py @@ -53,11 +53,6 @@ class Nextcloud(Service): def get_backup_description() -> str: return "All the files and other data stored in Nextcloud." 
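All of these DNS hunks now pass a `display_name`, which the `service.py` hunk further below turns into a required field on `ServiceDnsRecord`. A standalone sketch of what that requirement means in pydantic terms; the model is re-declared here so the snippet runs outside the repo:

```python
import typing

from pydantic import BaseModel, ValidationError


class ServiceDnsRecord(BaseModel):
    type: str
    name: str
    content: str
    ttl: int
    display_name: str  # new mandatory field introduced by this diff
    priority: typing.Optional[int] = None


# Mirrors the MX record built by MailServer.get_dns_records() above.
ServiceDnsRecord(
    type="MX",
    name="test-domain.tld",
    content="test-domain.tld",
    ttl=3600,
    priority=10,
    display_name="Mail server record",
)

# Omitting display_name is now a validation error.
try:
    ServiceDnsRecord(type="A", name="api", content="203.0.113.1", ttl=3600)
except ValidationError:
    print("display_name is required")  # this branch runs
```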
- @staticmethod - def is_enabled() -> bool: - with ReadUserData() as user_data: - return user_data.get("nextcloud", {}).get("enable", False) - @staticmethod def get_status() -> ServiceStatus: """ @@ -71,22 +66,6 @@ class Nextcloud(Service): """ return get_service_status("phpfpm-nextcloud.service") - @staticmethod - def enable(): - """Enable Nextcloud service.""" - with WriteUserData() as user_data: - if "nextcloud" not in user_data: - user_data["nextcloud"] = {} - user_data["nextcloud"]["enable"] = True - - @staticmethod - def disable(): - """Disable Nextcloud service.""" - with WriteUserData() as user_data: - if "nextcloud" not in user_data: - user_data["nextcloud"] = {} - user_data["nextcloud"]["enable"] = False - @staticmethod def stop(): """Stop Nextcloud service.""" @@ -128,12 +107,14 @@ class Nextcloud(Service): name="cloud", content=network_utils.get_ip4(), ttl=3600, + display_name="Nextcloud", ), ServiceDnsRecord( type="AAAA", name="cloud", content=network_utils.get_ip6(), ttl=3600, + display_name="Nextcloud (IPv6)", ), ] diff --git a/selfprivacy_api/services/ocserv/__init__.py b/selfprivacy_api/services/ocserv/__init__.py index 98c6e97..a28358d 100644 --- a/selfprivacy_api/services/ocserv/__init__.py +++ b/selfprivacy_api/services/ocserv/__init__.py @@ -51,29 +51,10 @@ class Ocserv(Service): def get_backup_description() -> str: return "Nothing to backup." - @staticmethod - def is_enabled() -> bool: - with ReadUserData() as user_data: - return user_data.get("ocserv", {}).get("enable", False) - @staticmethod def get_status() -> ServiceStatus: return get_service_status("ocserv.service") - @staticmethod - def enable(): - with WriteUserData() as user_data: - if "ocserv" not in user_data: - user_data["ocserv"] = {} - user_data["ocserv"]["enable"] = True - - @staticmethod - def disable(): - with WriteUserData() as user_data: - if "ocserv" not in user_data: - user_data["ocserv"] = {} - user_data["ocserv"]["enable"] = False - @staticmethod def stop(): subprocess.run(["systemctl", "stop", "ocserv.service"], check=False) @@ -106,12 +87,14 @@ class Ocserv(Service): name="vpn", content=network_utils.get_ip4(), ttl=3600, + display_name="OpenConnect VPN", ), ServiceDnsRecord( type="AAAA", name="vpn", content=network_utils.get_ip6(), ttl=3600, + display_name="OpenConnect VPN (IPv6)", ), ] diff --git a/selfprivacy_api/services/pleroma/__init__.py b/selfprivacy_api/services/pleroma/__init__.py index d98b13f..1aae50e 100644 --- a/selfprivacy_api/services/pleroma/__init__.py +++ b/selfprivacy_api/services/pleroma/__init__.py @@ -50,29 +50,10 @@ class Pleroma(Service): def get_backup_description() -> str: return "Your Pleroma accounts, posts and media." 
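The `is_enabled`/`enable`/`disable` triplets deleted above from Bitwarden, Gitea, Jitsi Meet, Nextcloud and Ocserv (and from Pleroma just below) differed only in their userdata key; the `service.py` hunk that follows replaces them with generic classmethods keyed by `get_id()`. A self-contained sketch of the pattern, with a plain dict standing in for `WriteUserData`:

```python
# Sketch of the deduplication: one generic toggle on the base class;
# subclasses only contribute their id. USER_DATA stands in for userdata.json.
USER_DATA: dict = {}


class Service:
    @classmethod
    def get_id(cls) -> str:
        raise NotImplementedError

    @classmethod
    def _set_enable(cls, enable: bool) -> None:
        modules = USER_DATA.setdefault("modules", {})
        modules.setdefault(cls.get_id(), {})["enable"] = enable

    @classmethod
    def enable(cls) -> None:
        cls._set_enable(True)

    @classmethod
    def disable(cls) -> None:
        cls._set_enable(False)


class Pleroma(Service):
    @classmethod
    def get_id(cls) -> str:
        return "pleroma"


Pleroma.enable()
assert USER_DATA == {"modules": {"pleroma": {"enable": True}}}
```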
- @staticmethod - def is_enabled() -> bool: - with ReadUserData() as user_data: - return user_data.get("pleroma", {}).get("enable", False) - @staticmethod def get_status() -> ServiceStatus: return get_service_status("pleroma.service") - @staticmethod - def enable(): - with WriteUserData() as user_data: - if "pleroma" not in user_data: - user_data["pleroma"] = {} - user_data["pleroma"]["enable"] = True - - @staticmethod - def disable(): - with WriteUserData() as user_data: - if "pleroma" not in user_data: - user_data["pleroma"] = {} - user_data["pleroma"]["enable"] = False - @staticmethod def stop(): subprocess.run(["systemctl", "stop", "pleroma.service"]) @@ -127,12 +108,14 @@ class Pleroma(Service): name="social", content=network_utils.get_ip4(), ttl=3600, + display_name="Pleroma", ), ServiceDnsRecord( type="AAAA", name="social", content=network_utils.get_ip6(), ttl=3600, + display_name="Pleroma (IPv6)", ), ] diff --git a/selfprivacy_api/services/service.py b/selfprivacy_api/services/service.py index b66bd19..f41c821 100644 --- a/selfprivacy_api/services/service.py +++ b/selfprivacy_api/services/service.py @@ -12,6 +12,7 @@ from selfprivacy_api.services.generic_size_counter import get_storage_usage from selfprivacy_api.services.owned_path import OwnedPath from selfprivacy_api import utils from selfprivacy_api.utils.waitloop import wait_until_true +from selfprivacy_api.utils import ReadUserData, WriteUserData, get_domain DEFAULT_START_STOP_TIMEOUT = 5 * 60 @@ -33,6 +34,7 @@ class ServiceDnsRecord(BaseModel): name: str content: str ttl: int + display_name: str priority: typing.Optional[int] = None @@ -124,11 +126,17 @@ class Service(ABC): """ pass - @staticmethod - @abstractmethod - def is_enabled() -> bool: - """`True` if the service is enabled.""" - pass + @classmethod + def is_enabled(cls) -> bool: + """ + `True` if the service is enabled. + `False` if it is not enabled or not defined in the file. + If there is nothing in the file, this is equivalent to False, + because NixOS won't enable it then. + """ + name = cls.get_id() + with ReadUserData() as user_data: + return user_data.get("modules", {}).get(name, {}).get("enable", False) @staticmethod @abstractmethod @@ -136,17 +144,25 @@ """The status of the service, reported by systemd.""" pass - @staticmethod - @abstractmethod - def enable(): - """Enable the service. Usually this means enabling systemd unit.""" - pass + @classmethod + def _set_enable(cls, enable: bool): + name = cls.get_id() + with WriteUserData() as user_data: + if "modules" not in user_data: + user_data["modules"] = {} + if name not in user_data["modules"]: + user_data["modules"][name] = {} + user_data["modules"][name]["enable"] = enable - @staticmethod - @abstractmethod - def disable(): + @classmethod + def enable(cls): + """Enable the service. Usually this means enabling systemd unit.""" + cls._set_enable(True) + + @classmethod + def disable(cls): """Disable the service. Usually this means disabling systemd unit.""" - pass + cls._set_enable(False) @staticmethod @abstractmethod @@ -209,9 +225,13 @@ return root_device with utils.ReadUserData() as userdata: if userdata.get("useBinds", False): - return userdata.get(cls.get_id(), {}).get( - "location", - root_device, + return ( + userdata.get("modules", {}) + .get(cls.get_id(), {}) + .get( + "location", + root_device, + ) ) else: return root_device @@ -246,6 +266,8 @@ @abstractmethod def move_to_volume(self, volume: BlockDevice) -> Job: + """Cannot raise errors. 
+ Returns errors as an errored out Job instead.""" pass @classmethod diff --git a/selfprivacy_api/services/test_service/__init__.py b/selfprivacy_api/services/test_service/__init__.py index 6ae33ef..1e315f5 100644 --- a/selfprivacy_api/services/test_service/__init__.py +++ b/selfprivacy_api/services/test_service/__init__.py @@ -8,9 +8,10 @@ from os import path # from enum import Enum -from selfprivacy_api.jobs import Job +from selfprivacy_api.jobs import Job, Jobs, JobStatus from selfprivacy_api.services.service import Service, ServiceDnsRecord, ServiceStatus from selfprivacy_api.utils.block_devices import BlockDevice +from selfprivacy_api.services.generic_service_mover import move_service, FolderMoveNames import selfprivacy_api.utils.network as network_utils from selfprivacy_api.services.test_service.icon import BITWARDEN_ICON @@ -22,16 +23,19 @@ class DummyService(Service): """A test service""" folders: List[str] = [] - startstop_delay = 0 + startstop_delay = 0.0 backuppable = True + movable = True + # if False, we try to actually move + simulate_moving = True + drive = "sda1" def __init_subclass__(cls, folders: List[str]): cls.folders = folders def __init__(self): super().__init__() - status_file = self.status_file() - with open(status_file, "w") as file: + with open(self.status_file(), "w") as file: file.write(ServiceStatus.ACTIVE.value) @staticmethod @@ -61,9 +65,9 @@ domain = "test.com" return f"https://password.{domain}" - @staticmethod - def is_movable() -> bool: - return True + @classmethod + def is_movable(cls) -> bool: + return cls.movable @staticmethod def is_required() -> bool: @@ -73,10 +77,6 @@ def get_backup_description() -> str: return "How did we get here?" - @staticmethod - def is_enabled() -> bool: - return True - @classmethod def status_file(cls) -> str: dir = cls.folders[0] @@ -116,22 +116,30 @@ we can only set it up dynamically for tests via a classmethod""" cls.backuppable = new_value + @classmethod + def set_movable(cls, new_value: bool) -> None: + """For tests: because is_movable is static, + we can only set it up dynamically for tests via a classmethod""" + cls.movable = new_value + @classmethod def can_be_backed_up(cls) -> bool: """`True` if the service can be backed up.""" return cls.backuppable @classmethod - def enable(cls): - pass + def set_delay(cls, new_delay_sec: float) -> None: + cls.startstop_delay = new_delay_sec @classmethod - def disable(cls, delay): - pass + def set_drive(cls, new_drive: str) -> None: + cls.drive = new_drive @classmethod - def set_delay(cls, new_delay): - cls.startstop_delay = new_delay + def set_simulated_moves(cls, enabled: bool) -> None: + """If True, this service will not actually call moving code + when moved""" + cls.simulate_moving = enabled @classmethod def stop(cls): @@ -169,9 +177,9 @@ storage_usage = 0 return storage_usage - @staticmethod - def get_drive() -> str: - return "sda1" + @classmethod + def get_drive(cls) -> str: + return cls.drive @classmethod def get_folders(cls) -> List[str]: @@ -186,14 +194,34 @@ name="password", content=network_utils.get_ip4(), ttl=3600, + display_name="Test Service", ), ServiceDnsRecord( type="AAAA", name="password", content=network_utils.get_ip6(), ttl=3600, + display_name="Test Service (IPv6)", ), ] def move_to_volume(self, volume: BlockDevice) -> Job: - pass + job = Jobs.add( + type_id=f"services.{self.get_id()}.move", + name=f"Move 
{self.get_display_name()}", + description=f"Moving {self.get_display_name()} data to {volume.name}", + ) + if self.simulate_moving is False: + # completely generic code, TODO: make it the default impl. + move_service( + self, + volume, + job, + FolderMoveNames.default_foldermoves(self), + self.get_id(), + ) + else: + Jobs.update(job, status=JobStatus.FINISHED) + + self.set_drive(volume.name) + return job diff --git a/selfprivacy_api/utils/__init__.py b/selfprivacy_api/utils/__init__.py index 96bf9d8..779bdf6 100644 --- a/selfprivacy_api/utils/__init__.py +++ b/selfprivacy_api/utils/__init__.py @@ -6,27 +6,25 @@ import json import os import subprocess import portalocker +import typing -USERDATA_FILE = "/etc/nixos/userdata/userdata.json" -TOKENS_FILE = "/etc/nixos/userdata/tokens.json" -JOBS_FILE = "/etc/nixos/userdata/jobs.json" -DOMAIN_FILE = "/var/domain" +USERDATA_FILE = "/etc/nixos/userdata.json" +SECRETS_FILE = "/etc/selfprivacy/secrets.json" +DKIM_DIR = "/var/dkim/" class UserDataFiles(Enum): """Enum for userdata files""" USERDATA = 0 - TOKENS = 1 - JOBS = 2 + SECRETS = 3 def get_domain(): - """Get domain from /var/domain without trailing new line""" - with open(DOMAIN_FILE, "r", encoding="utf-8") as domain_file: - domain = domain_file.readline().rstrip() - return domain + """Get domain from userdata.json""" + with ReadUserData() as user_data: + return user_data["domain"] class WriteUserData(object): @@ -35,14 +33,12 @@ class WriteUserData(object): def __init__(self, file_type=UserDataFiles.USERDATA): if file_type == UserDataFiles.USERDATA: self.userdata_file = open(USERDATA_FILE, "r+", encoding="utf-8") - elif file_type == UserDataFiles.TOKENS: - self.userdata_file = open(TOKENS_FILE, "r+", encoding="utf-8") - elif file_type == UserDataFiles.JOBS: + elif file_type == UserDataFiles.SECRETS: # Make sure file exists - if not os.path.exists(JOBS_FILE): - with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: - jobs_file.write("{}") - self.userdata_file = open(JOBS_FILE, "r+", encoding="utf-8") + if not os.path.exists(SECRETS_FILE): + with open(SECRETS_FILE, "w", encoding="utf-8") as secrets_file: + secrets_file.write("{}") + self.userdata_file = open(SECRETS_FILE, "r+", encoding="utf-8") else: raise ValueError("Unknown file type") portalocker.lock(self.userdata_file, portalocker.LOCK_EX) @@ -66,14 +62,11 @@ class ReadUserData(object): def __init__(self, file_type=UserDataFiles.USERDATA): if file_type == UserDataFiles.USERDATA: self.userdata_file = open(USERDATA_FILE, "r", encoding="utf-8") - elif file_type == UserDataFiles.TOKENS: - self.userdata_file = open(TOKENS_FILE, "r", encoding="utf-8") - elif file_type == UserDataFiles.JOBS: - # Make sure file exists - if not os.path.exists(JOBS_FILE): - with open(JOBS_FILE, "w", encoding="utf-8") as jobs_file: - jobs_file.write("{}") - self.userdata_file = open(JOBS_FILE, "r", encoding="utf-8") + elif file_type == UserDataFiles.SECRETS: + if not os.path.exists(SECRETS_FILE): + with open(SECRETS_FILE, "w", encoding="utf-8") as secrets_file: + secrets_file.write("{}") + self.userdata_file = open(SECRETS_FILE, "r", encoding="utf-8") else: raise ValueError("Unknown file type") portalocker.lock(self.userdata_file, portalocker.LOCK_SH) @@ -88,10 +81,12 @@ class ReadUserData(object): def validate_ssh_public_key(key): - """Validate SSH public key. It may be ssh-ed25519 or ssh-rsa.""" + """Validate SSH public key. 
+ It may be ssh-ed25519, ssh-rsa or ecdsa-sha2-nistp256.""" if not key.startswith("ssh-ed25519"): if not key.startswith("ssh-rsa"): - return False + if not key.startswith("ecdsa-sha2-nistp256"): + return False return True @@ -164,26 +159,31 @@ def parse_date(date_str: str) -> datetime.datetime: raise ValueError("Invalid date string") -def get_dkim_key(domain, parse=True): +def parse_dkim(dkim: str) -> str: + # extract key from file + dkim = dkim.split("(")[1] + dkim = dkim.split(")")[0] + # replace all quotes with nothing + dkim = dkim.replace('"', "") + # trim whitespace, remove newlines and tabs + dkim = dkim.strip() + dkim = dkim.replace("\n", "") + dkim = dkim.replace("\t", "") + # remove all redundant spaces + dkim = " ".join(dkim.split()) + return dkim + + +def get_dkim_key(domain: str, parse: bool = True) -> typing.Optional[str]: """Get DKIM key from /var/dkim/<domain>.selector.txt""" - if os.path.exists("/var/dkim/" + domain + ".selector.txt"): - cat_process = subprocess.Popen( - ["cat", "/var/dkim/" + domain + ".selector.txt"], stdout=subprocess.PIPE - ) - dkim = cat_process.communicate()[0] - if parse: - # Extract key from file - dkim = dkim.split(b"(")[1] - dkim = dkim.split(b")")[0] - # Replace all quotes with nothing - dkim = dkim.replace(b'"', b"") - # Trim whitespace, remove newlines and tabs - dkim = dkim.strip() - dkim = dkim.replace(b"\n", b"") - dkim = dkim.replace(b"\t", b"") - # Remove all redundant spaces - dkim = b" ".join(dkim.split()) - return str(dkim, "utf-8") + + dkim_path = os.path.join(DKIM_DIR, domain + ".selector.txt") + if os.path.exists(dkim_path): + with open(dkim_path, encoding="utf-8") as dkim_file: + dkim = dkim_file.read() + if parse: + dkim = parse_dkim(dkim) + return dkim return None diff --git a/selfprivacy_api/utils/block_devices.py b/selfprivacy_api/utils/block_devices.py index 83fc28f..ab3794d 100644 --- a/selfprivacy_api/utils/block_devices.py +++ b/selfprivacy_api/utils/block_devices.py @@ -1,4 +1,5 @@ -"""Wrapper for block device functions.""" +"""A block device API wrapping lsblk""" +from __future__ import annotations import subprocess import json import typing @@ -11,6 +12,7 @@ def get_block_device(device_name): """ Return a block device by name. """ + # TODO: remove the function and related tests: duplicated by singleton lsblk_output = subprocess.check_output( [ "lsblk", @@ -43,22 +45,37 @@ class BlockDevice: A block device. 
""" - def __init__(self, block_device): - self.name = block_device["name"] - self.path = block_device["path"] - self.fsavail = str(block_device["fsavail"]) - self.fssize = str(block_device["fssize"]) - self.fstype = block_device["fstype"] - self.fsused = str(block_device["fsused"]) - self.mountpoints = block_device["mountpoints"] - self.label = block_device["label"] - self.uuid = block_device["uuid"] - self.size = str(block_device["size"]) - self.model = block_device["model"] - self.serial = block_device["serial"] - self.type = block_device["type"] + def __init__(self, device_dict: dict): + self.update_from_dict(device_dict) + + def update_from_dict(self, device_dict: dict): + self.name = device_dict["name"] + self.path = device_dict["path"] + self.fsavail = str(device_dict["fsavail"]) + self.fssize = str(device_dict["fssize"]) + self.fstype = device_dict["fstype"] + self.fsused = str(device_dict["fsused"]) + self.mountpoints = device_dict["mountpoints"] + self.label = device_dict["label"] + self.uuid = device_dict["uuid"] + self.size = str(device_dict["size"]) + self.model = device_dict["model"] + self.serial = device_dict["serial"] + self.type = device_dict["type"] self.locked = False + self.children: typing.List[BlockDevice] = [] + if "children" in device_dict.keys(): + for child in device_dict["children"]: + self.children.append(BlockDevice(child)) + + def all_children(self) -> typing.List[BlockDevice]: + result = [] + for child in self.children: + result.extend(child.all_children()) + result.append(child) + return result + def __str__(self): return self.name @@ -82,17 +99,7 @@ class BlockDevice: Update current data and return a dictionary of stats. """ device = get_block_device(self.name) - self.fsavail = str(device["fsavail"]) - self.fssize = str(device["fssize"]) - self.fstype = device["fstype"] - self.fsused = str(device["fsused"]) - self.mountpoints = device["mountpoints"] - self.label = device["label"] - self.uuid = device["uuid"] - self.size = str(device["size"]) - self.model = device["model"] - self.serial = device["serial"] - self.type = device["type"] + self.update_from_dict(device) return { "name": self.name, @@ -110,6 +117,14 @@ class BlockDevice: "type": self.type, } + def is_usable_partition(self): + # Ignore devices with type "rom" + if self.type == "rom": + return False + if self.fstype == "ext4": + return True + return False + def resize(self): """ Resize the block device. @@ -165,41 +180,16 @@ class BlockDevices(metaclass=SingletonMetaclass): """ Update the list of block devices. 
""" - devices = [] - lsblk_output = subprocess.check_output( - [ - "lsblk", - "-J", - "-b", - "-o", - "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", - ] - ) - lsblk_output = lsblk_output.decode("utf-8") - lsblk_output = json.loads(lsblk_output) - for device in lsblk_output["blockdevices"]: - # Ignore devices with type "rom" - if device["type"] == "rom": - continue - # Ignore iso9660 devices - if device["fstype"] == "iso9660": - continue - if device["fstype"] is None: - if "children" in device: - for child in device["children"]: - if child["fstype"] == "ext4": - device = child - break - devices.append(device) - # Add new devices and delete non-existent devices + devices = BlockDevices.lsblk_devices() + + children = [] for device in devices: - if device["name"] not in [ - block_device.name for block_device in self.block_devices - ]: - self.block_devices.append(BlockDevice(device)) - for block_device in self.block_devices: - if block_device.name not in [device["name"] for device in devices]: - self.block_devices.remove(block_device) + children.extend(device.all_children()) + devices.extend(children) + + valid_devices = [device for device in devices if device.is_usable_partition()] + + self.block_devices = valid_devices def get_block_device(self, name: str) -> typing.Optional[BlockDevice]: """ @@ -236,3 +226,25 @@ class BlockDevices(metaclass=SingletonMetaclass): if "/" in block_device.mountpoints: return block_device raise RuntimeError("No root block device found") + + @staticmethod + def lsblk_device_dicts() -> typing.List[dict]: + lsblk_output_bytes = subprocess.check_output( + [ + "lsblk", + "-J", + "-b", + "-o", + "NAME,PATH,FSAVAIL,FSSIZE,FSTYPE,FSUSED,MOUNTPOINTS,LABEL,UUID,SIZE,MODEL,SERIAL,TYPE", + ] + ) + lsblk_output = lsblk_output_bytes.decode("utf-8") + return json.loads(lsblk_output)["blockdevices"] + + @staticmethod + def lsblk_devices() -> typing.List[BlockDevice]: + devices = [] + for device in BlockDevices.lsblk_device_dicts(): + devices.append(device) + + return [BlockDevice(device) for device in devices] diff --git a/selfprivacy_api/utils/huey.py b/selfprivacy_api/utils/huey.py index a7ff492..8e09446 100644 --- a/selfprivacy_api/utils/huey.py +++ b/selfprivacy_api/utils/huey.py @@ -2,14 +2,15 @@ import os from huey import SqliteHuey -HUEY_DATABASE = "/etc/nixos/userdata/tasks.db" +HUEY_DATABASE = "/etc/selfprivacy/tasks.db" # Singleton instance containing the huey database. 
test_mode = os.environ.get("TEST_MODE") huey = SqliteHuey( - HUEY_DATABASE, + "selfprivacy-api", + filename=HUEY_DATABASE if not test_mode else None, immediate=test_mode == "true", utc=True, ) diff --git a/selfprivacy_api/utils/redis_model_storage.py b/selfprivacy_api/utils/redis_model_storage.py index 51faff7..06dfe8c 100644 --- a/selfprivacy_api/utils/redis_model_storage.py +++ b/selfprivacy_api/utils/redis_model_storage.py @@ -1,11 +1,14 @@ from datetime import datetime from typing import Optional +from enum import Enum def store_model_as_hash(redis, redis_key, model): for key, value in model.dict().items(): if isinstance(value, datetime): value = value.isoformat() + if isinstance(value, Enum): + value = value.value redis.hset(redis_key, key, str(value)) diff --git a/selfprivacy_api/utils/timeutils.py b/selfprivacy_api/utils/timeutils.py new file mode 100644 index 0000000..b6494c6 --- /dev/null +++ b/selfprivacy_api/utils/timeutils.py @@ -0,0 +1,52 @@ +from datetime import datetime, timezone + + +def ensure_tz_aware(dt: datetime) -> datetime: + """ + returns timezone-aware datetime + assumes utc on naive datetime input + """ + if dt.tzinfo is None: + # astimezone() is dangerous, it makes an implicit assumption that + # the time is localtime + dt = dt.replace(tzinfo=timezone.utc) + return dt + + +def ensure_tz_aware_strict(dt: datetime) -> datetime: + """ + returns timezone-aware datetime + raises error if input is a naive datetime + """ + if dt.tzinfo is None: + raise ValueError( + "no timezone in datetime (tz-aware datetime is required for this operation)", + dt, + ) + return dt + + +def tzaware_parse_time(iso_timestamp: str) -> datetime: + """ + parse an iso8601 timestamp into timezone-aware datetime + assume utc if no timezone in stamp + example of timestamp: + 2023-11-10T12:07:47.868788+00:00 + + """ + dt = datetime.fromisoformat(iso_timestamp) + dt = ensure_tz_aware(dt) + return dt + + +def tzaware_parse_time_strict(iso_timestamp: str) -> datetime: + """ + parse an iso8601 timestamp into timezone-aware datetime + raise an error if no timezone in stamp + example of timestamp: + 2023-11-10T12:07:47.868788+00:00 + + """ + dt = datetime.fromisoformat(iso_timestamp) + dt = ensure_tz_aware_strict(dt) + return dt diff --git a/setup.py b/setup.py index 99f0679..36aa68e 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name="selfprivacy_api", - version="2.3.1", + version="3.0.0", packages=find_packages(), scripts=[ "selfprivacy_api/app.py", diff --git a/shell.nix b/shell.nix index c211015..e69de29 100644 --- a/shell.nix +++ b/shell.nix @@ -1,49 +0,0 @@ -{ pkgs ? import { } }: -let - sp-python = pkgs.python310.withPackages (p: with p; [ - setuptools - portalocker - pytz - pytest - pytest-asyncio - pytest-mock - pytest-datadir - huey - gevent - mnemonic - coverage - pylint - rope - mypy - pylsp-mypy - pydantic - typing-extensions - psutil - black - fastapi - uvicorn - redis - strawberry-graphql - flake8-bugbear - flake8 - ]); -in -pkgs.mkShell { - buildInputs = [ - sp-python - pkgs.black - pkgs.redis - pkgs.restic - pkgs.rclone - ]; - shellHook = '' - PYTHONPATH=${sp-python}/${sp-python.sitePackages} - # envs set with export and as attributes are treated differently. - # for example. printenv will not fetch the value of an attribute. 
- export USE_REDIS_PORT=6379 - pkill redis-server - sleep 2 - setsid redis-server --bind 127.0.0.1 --port $USE_REDIS_PORT >/dev/null 2>/dev/null & - # maybe set more env-vars - ''; -} diff --git a/tests/common.py b/tests/common.py index e4a283d..ae3f0d0 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1,6 +1,45 @@ import json +from datetime import datetime, timezone, timedelta from mnemonic import Mnemonic +# for expiration tests. If headache, consider freezegun +RECOVERY_KEY_VALIDATION_DATETIME = "selfprivacy_api.models.tokens.time.datetime" +DEVICE_KEY_VALIDATION_DATETIME = RECOVERY_KEY_VALIDATION_DATETIME + + +def ten_minutes_into_future_naive(): + return datetime.now() + timedelta(minutes=10) + + +def ten_minutes_into_future_naive_utc(): + return datetime.utcnow() + timedelta(minutes=10) + + +def ten_minutes_into_future(): + return datetime.now(timezone.utc) + timedelta(minutes=10) + + +def ten_minutes_into_past_naive(): + return datetime.now() - timedelta(minutes=10) + + +def ten_minutes_into_past_naive_utc(): + return datetime.utcnow() - timedelta(minutes=10) + + +def ten_minutes_into_past(): + return datetime.now(timezone.utc) - timedelta(minutes=10) + + +class NearFuture(datetime): + @classmethod + def now(cls, tz=None): + return datetime.now(tz) + timedelta(minutes=13) + + @classmethod + def utcnow(cls): + return datetime.utcnow() + timedelta(minutes=13) + def read_json(file_path): with open(file_path, "r", encoding="utf-8") as file: @@ -28,5 +67,15 @@ def generate_backup_query(query_array): return "query TestBackup {\n backup {" + "\n".join(query_array) + "}\n}" +def generate_service_query(query_array): + return "query TestService {\n services {" + "\n".join(query_array) + "}\n}" + + def mnemonic_to_hex(mnemonic): return Mnemonic(language="english").to_entropy(mnemonic).hex() + + +def assert_recovery_recent(time_generated: str): + assert datetime.fromisoformat(time_generated) - timedelta(seconds=5) < datetime.now( + timezone.utc + ) diff --git a/tests/conftest.py b/tests/conftest.py index 7e8ae11..e651c08 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,9 +3,57 @@ # pylint: disable=unused-argument import os import pytest -from os import path +import datetime +from os import path +from os import makedirs +from typing import Generator from fastapi.testclient import TestClient +from selfprivacy_api.models.tokens.token import Token + +from selfprivacy_api.utils.huey import huey + +import selfprivacy_api.services as services +from selfprivacy_api.services import get_service_by_id, Service +from selfprivacy_api.services.test_service import DummyService + +from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( + RedisTokensRepository, +) + + +TESTFILE_BODY = "testytest!" +TESTFILE_2_BODY = "testissimo!" 
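The `NearFuture` helper above lets a test patch `datetime` inside the token modules so that "now" reads 13 minutes late, pushing the 10-minute expiries produced by the helpers into the past. A hedged usage sketch; the test itself is illustrative, not part of this diff, and assumes the pytest-mock `mocker` fixture:

```python
from datetime import datetime, timezone

from tests.common import (
    NearFuture,
    RECOVERY_KEY_VALIDATION_DATETIME,
    ten_minutes_into_future,
)


def test_expiry_leapfrog(mocker):
    expiry = ten_minutes_into_future()
    assert expiry > datetime.now(timezone.utc)  # still valid in real time

    # Redirect datetime lookups inside the token models to NearFuture,
    # so validation code there sees a clock 13 minutes ahead of the helpers.
    mocker.patch(RECOVERY_KEY_VALIDATION_DATETIME, NearFuture)
    assert expiry < NearFuture.now(timezone.utc)  # now reads as expired
```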
+ +TOKENS_FILE_CONTENTS = { + "tokens": [ + { + "token": "TEST_TOKEN", + "name": "test_token", + "date": datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), + }, + { + "token": "TEST_TOKEN2", + "name": "test_token2", + "date": datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), + }, + ] +} + +TOKENS = [ + Token( + token="TEST_TOKEN", + device_name="test_token", + created_at=datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), + ), + Token( + token="TEST_TOKEN2", + device_name="test_token2", + created_at=datetime.datetime(2022, 1, 14, 8, 31, 10, 789314), + ), +] + +DEVICE_WE_AUTH_TESTS_WITH = TOKENS_FILE_CONTENTS["tokens"][0] def pytest_generate_tests(metafunc): @@ -17,19 +65,22 @@ def global_data_dir(): @pytest.fixture -def tokens_file(mocker, shared_datadir): - """Mock tokens file.""" - mock = mocker.patch( - "selfprivacy_api.utils.TOKENS_FILE", shared_datadir / "tokens.json" - ) - return mock +def empty_redis_repo(): + repo = RedisTokensRepository() + repo.reset() + assert repo.get_tokens() == [] + return repo @pytest.fixture -def jobs_file(mocker, shared_datadir): - """Mock tokens file.""" - mock = mocker.patch("selfprivacy_api.utils.JOBS_FILE", shared_datadir / "jobs.json") - return mock +def redis_repo_with_tokens(): + repo = RedisTokensRepository() + repo.reset() + for token in TOKENS: + repo._store_token(token) + assert sorted(repo.get_tokens(), key=lambda x: x.token) == sorted( + TOKENS, key=lambda x: x.token + ) @pytest.fixture @@ -56,27 +107,75 @@ def huey_database(mocker, shared_datadir): @pytest.fixture -def client(tokens_file, huey_database, jobs_file): +def client(huey_database, redis_repo_with_tokens): from selfprivacy_api.app import app return TestClient(app) @pytest.fixture -def authorized_client(tokens_file, huey_database, jobs_file): +def authorized_client(huey_database, redis_repo_with_tokens): """Authorized test client fixture.""" from selfprivacy_api.app import app client = TestClient(app) - client.headers.update({"Authorization": "Bearer TEST_TOKEN"}) + client.headers.update( + {"Authorization": "Bearer " + DEVICE_WE_AUTH_TESTS_WITH["token"]} + ) return client @pytest.fixture -def wrong_auth_client(tokens_file, huey_database, jobs_file): +def wrong_auth_client(huey_database, redis_repo_with_tokens): """Wrong token test client fixture.""" from selfprivacy_api.app import app client = TestClient(app) client.headers.update({"Authorization": "Bearer WRONG_TOKEN"}) return client + + +@pytest.fixture() +def raw_dummy_service(tmpdir): + dirnames = ["test_service", "also_test_service"] + service_dirs = [] + for d in dirnames: + service_dir = path.join(tmpdir, d) + makedirs(service_dir) + service_dirs.append(service_dir) + + testfile_path_1 = path.join(service_dirs[0], "testfile.txt") + with open(testfile_path_1, "w") as file: + file.write(TESTFILE_BODY) + + testfile_path_2 = path.join(service_dirs[1], "testfile2.txt") + with open(testfile_path_2, "w") as file: + file.write(TESTFILE_2_BODY) + + # we need this to not change get_folders() much + class TestDummyService(DummyService, folders=service_dirs): + pass + + service = TestDummyService() + # assert pickle.dumps(service) is not None + return service + + +@pytest.fixture() +def dummy_service( + tmpdir, raw_dummy_service, generic_userdata +) -> Generator[Service, None, None]: + service = raw_dummy_service + + # register our service + services.services.append(service) + + huey.immediate = True + assert huey.immediate is True + + assert get_service_by_id(service.get_id()) is not None + service.enable() + yield service + + # cleanup 
because apparently it matters wrt tasks + services.services.remove(service) diff --git a/tests/test_graphql/test_system/domain b/tests/data/domain similarity index 100% rename from tests/test_graphql/test_system/domain rename to tests/data/domain diff --git a/tests/data/jobs.json b/tests/data/jobs.json deleted file mode 100644 index 0967ef4..0000000 --- a/tests/data/jobs.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/data/tokens.json b/tests/data/tokens.json deleted file mode 100644 index 9be9d02..0000000 --- a/tests/data/tokens.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314" - }, - { - "token": "TEST_TOKEN2", - "name": "test_token2", - "date": "2022-01-14 08:31:10.789314" - } - ] -} \ No newline at end of file diff --git a/tests/data/turned_on.json b/tests/data/turned_on.json index c6b758b..badf57b 100644 --- a/tests/data/turned_on.json +++ b/tests/data/turned_on.json @@ -1,21 +1,76 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [ + { + "username": "user1", + "hashedPassword": "HASHED_PASSWORD_1", + "sshKeys": ["ssh-rsa KEY user1@pc"] + }, + { + "username": "user2", + "hashedPassword": "HASHED_PASSWORD_2", + "sshKeys": ["ssh-rsa KEY user2@pc"] + }, + { + "username": "user3", + "hashedPassword": "HASHED_PASSWORD_3", + "sshKeys": ["ssh-rsa KEY user3@pc"] + } + ], + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "resticPassword": "PASS", + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, @@ -23,34 +78,6 @@ "ssh-ed25519 KEY test@pc" ] }, - "username": "tester", - "gitea": { - "enable": true - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "jitsi": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, "backup": { "provider": "BACKBLAZE", "accountId": "ID", diff --git a/tests/test_autobackup.py b/tests/test_autobackup.py new file mode 100644 index 0000000..63c625f --- /dev/null +++ b/tests/test_autobackup.py @@ -0,0 +1,538 @@ +import pytest +from copy import copy + +from datetime import datetime, timezone, timedelta + +from selfprivacy_api.jobs import Jobs +from selfprivacy_api.services import Service, get_all_services + +from selfprivacy_api.graphql.common_types.backup import ( + BackupReason, + 
AutobackupQuotas, +) + +from selfprivacy_api.backup import Backups, Snapshot +from selfprivacy_api.backup.tasks import ( + prune_autobackup_snapshots, +) + +from tests.test_backup import backups + + +def backuppable_services() -> list[Service]: + return [service for service in get_all_services() if service.can_be_backed_up()] + + +def dummy_snapshot(date: datetime): + return Snapshot( + id=str(hash(date)), + service_name="someservice", + created_at=date, + reason=BackupReason.EXPLICIT, + ) + + +def test_no_default_autobackup(backups, dummy_service): + now = datetime.now(timezone.utc) + assert not Backups.is_time_to_backup_service(dummy_service, now) + assert not Backups.is_time_to_backup(now) + + +# --------------------- Timing ------------------------- + + +def test_set_autobackup_period(backups): + assert Backups.autobackup_period_minutes() is None + + Backups.set_autobackup_period_minutes(2) + assert Backups.autobackup_period_minutes() == 2 + + Backups.disable_all_autobackup() + assert Backups.autobackup_period_minutes() is None + + Backups.set_autobackup_period_minutes(3) + assert Backups.autobackup_period_minutes() == 3 + + Backups.set_autobackup_period_minutes(0) + assert Backups.autobackup_period_minutes() is None + + Backups.set_autobackup_period_minutes(3) + assert Backups.autobackup_period_minutes() == 3 + + Backups.set_autobackup_period_minutes(-1) + assert Backups.autobackup_period_minutes() is None + + +def test_autobackup_timer_periods(backups, dummy_service): + now = datetime.now(timezone.utc) + backup_period = 13 # minutes + + assert not Backups.is_time_to_backup_service(dummy_service, now) + assert not Backups.is_time_to_backup(now) + + Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup_service(dummy_service, now) + assert Backups.is_time_to_backup(now) + + Backups.set_autobackup_period_minutes(0) + assert not Backups.is_time_to_backup_service(dummy_service, now) + assert not Backups.is_time_to_backup(now) + + +def test_autobackup_timer_enabling(backups, dummy_service): + now = datetime.now(timezone.utc) + backup_period = 13 # minutes + dummy_service.set_backuppable(False) + + Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup( + now + ) # there are other services too, not just our dummy + + # not backuppable service is not backuppable even if period is set + assert not Backups.is_time_to_backup_service(dummy_service, now) + + dummy_service.set_backuppable(True) + assert dummy_service.can_be_backed_up() + assert Backups.is_time_to_backup_service(dummy_service, now) + + Backups.disable_all_autobackup() + assert not Backups.is_time_to_backup_service(dummy_service, now) + assert not Backups.is_time_to_backup(now) + + +def test_autobackup_timing(backups, dummy_service): + backup_period = 13 # minutes + now = datetime.now(timezone.utc) + + Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup_service(dummy_service, now) + assert Backups.is_time_to_backup(now) + + Backups.back_up(dummy_service) + + now = datetime.now(timezone.utc) + assert not Backups.is_time_to_backup_service(dummy_service, now) + + past = datetime.now(timezone.utc) - timedelta(minutes=1) + assert not Backups.is_time_to_backup_service(dummy_service, past) + + future = datetime.now(timezone.utc) + timedelta(minutes=backup_period + 2) + assert Backups.is_time_to_backup_service(dummy_service, future) + + +# --------------------- What to autobackup and what not to -------------------- + + +def 
test_services_to_autobackup(backups, dummy_service): + backup_period = 13 # minutes + now = datetime.now(timezone.utc) + + dummy_service.set_backuppable(False) + services = Backups.services_to_back_up(now) + assert len(services) == 0 + + dummy_service.set_backuppable(True) + + services = Backups.services_to_back_up(now) + assert len(services) == 0 + + Backups.set_autobackup_period_minutes(backup_period) + + services = Backups.services_to_back_up(now) + assert len(services) == len(backuppable_services()) + assert dummy_service.get_id() in [ + service.get_id() for service in backuppable_services() + ] + + +def test_do_not_autobackup_disabled_services(backups, dummy_service): + now = datetime.now(timezone.utc) + Backups.set_autobackup_period_minutes(3) + assert Backups.is_time_to_backup_service(dummy_service, now) is True + + dummy_service.disable() + assert Backups.is_time_to_backup_service(dummy_service, now) is False + + +def test_failed_autoback_prevents_more_autobackup(backups, dummy_service): + backup_period = 13 # minutes + now = datetime.now(timezone.utc) + + Backups.set_autobackup_period_minutes(backup_period) + assert Backups.is_time_to_backup_service(dummy_service, now) + + # artificially making an errored out backup job + dummy_service.set_backuppable(False) + with pytest.raises(ValueError): + Backups.back_up(dummy_service) + dummy_service.set_backuppable(True) + + assert Backups.get_last_backed_up(dummy_service) is None + assert Backups.get_last_backup_error_time(dummy_service) is not None + + assert Backups.is_time_to_backup_service(dummy_service, now) is False + + +# --------------------- Quotas and Pruning ------------------------- + + +unlimited_quotas = AutobackupQuotas( + last=-1, + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, +) + +zero_quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, +) + + +def test_get_empty_quotas(backups): + quotas = Backups.autobackup_quotas() + assert quotas is not None + assert quotas == unlimited_quotas + + +def test_set_quotas(backups): + quotas = AutobackupQuotas( + last=3, + daily=2343, + weekly=343, + monthly=0, + yearly=-34556, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == AutobackupQuotas( + last=3, + daily=2343, + weekly=343, + monthly=0, + yearly=-1, + ) + + +def test_set_zero_quotas(backups): + quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == zero_quotas + + +def test_set_unlimited_quotas(backups): + quotas = AutobackupQuotas( + last=-1, + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == unlimited_quotas + + +def test_set_zero_quotas_after_unlimited(backups): + quotas = AutobackupQuotas( + last=-1, + daily=-1, + weekly=-1, + monthly=-1, + yearly=-1, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == unlimited_quotas + + quotas = AutobackupQuotas( + last=0, + daily=0, + weekly=0, + monthly=0, + yearly=0, + ) + Backups.set_autobackup_quotas(quotas) + assert Backups.autobackup_quotas() == zero_quotas + + +def test_autobackup_snapshots_pruning(backups): + # Wednesday, fourth week + now = datetime(year=2023, month=1, day=25, hour=10) 
+ + snaps = [ + dummy_snapshot(now), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(hours=5)), + dummy_snapshot(now - timedelta(days=1)), + dummy_snapshot(now - timedelta(days=1, hours=2)), + dummy_snapshot(now - timedelta(days=1, hours=3)), + dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=7)), + dummy_snapshot(now - timedelta(days=12)), + dummy_snapshot(now - timedelta(days=23)), + dummy_snapshot(now - timedelta(days=28)), + dummy_snapshot(now - timedelta(days=32)), + dummy_snapshot(now - timedelta(days=47)), + dummy_snapshot(now - timedelta(days=64)), + dummy_snapshot(now - timedelta(days=84)), + dummy_snapshot(now - timedelta(days=104)), + dummy_snapshot(now - timedelta(days=365 * 2)), + ] + old_len = len(snaps) + + quotas = copy(unlimited_quotas) + Backups.set_autobackup_quotas(quotas) + assert Backups._prune_snaps_with_quotas(snaps) == snaps + + quotas = copy(zero_quotas) + quotas.last = 2 + quotas.daily = 2 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(now), + dummy_snapshot(now - timedelta(minutes=5)), + # dummy_snapshot(now - timedelta(hours=2)), + # dummy_snapshot(now - timedelta(hours=5)), + dummy_snapshot(now - timedelta(days=1)), + # dummy_snapshot(now - timedelta(days=1, hours=2)), + # dummy_snapshot(now - timedelta(days=1, hours=3)), + # dummy_snapshot(now - timedelta(days=2)), + # dummy_snapshot(now - timedelta(days=7)), + # dummy_snapshot(now - timedelta(days=12)), + # dummy_snapshot(now - timedelta(days=23)), + # dummy_snapshot(now - timedelta(days=28)), + # dummy_snapshot(now - timedelta(days=32)), + # dummy_snapshot(now - timedelta(days=47)), + # dummy_snapshot(now - timedelta(days=64)), + # dummy_snapshot(now - timedelta(days=84)), + # dummy_snapshot(now - timedelta(days=104)), + # dummy_snapshot(now - timedelta(days=365 * 2)), + ] + + # checking that this function does not mutate the argument + assert snaps != snaps_to_keep + assert len(snaps) == old_len + + quotas = copy(zero_quotas) + quotas.weekly = 4 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(now), + # dummy_snapshot(now - timedelta(minutes=5)), + # dummy_snapshot(now - timedelta(hours=2)), + # dummy_snapshot(now - timedelta(hours=5)), + # dummy_snapshot(now - timedelta(days=1)), + # dummy_snapshot(now - timedelta(days=1, hours=2)), + # dummy_snapshot(now - timedelta(days=1, hours=3)), + # dummy_snapshot(now - timedelta(days=2)), + dummy_snapshot(now - timedelta(days=7)), + dummy_snapshot(now - timedelta(days=12)), + dummy_snapshot(now - timedelta(days=23)), + # dummy_snapshot(now - timedelta(days=28)), + # dummy_snapshot(now - timedelta(days=32)), + # dummy_snapshot(now - timedelta(days=47)), + # dummy_snapshot(now - timedelta(days=64)), + # dummy_snapshot(now - timedelta(days=84)), + # dummy_snapshot(now - timedelta(days=104)), + # dummy_snapshot(now - timedelta(days=365 * 2)), + ] + + quotas = copy(zero_quotas) + quotas.monthly = 7 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(now), + # dummy_snapshot(now - timedelta(minutes=5)), + # dummy_snapshot(now - timedelta(hours=2)), + # dummy_snapshot(now - timedelta(hours=5)), + # dummy_snapshot(now - timedelta(days=1)), + # dummy_snapshot(now - timedelta(days=1, 
hours=2)), + # dummy_snapshot(now - timedelta(days=1, hours=3)), + # dummy_snapshot(now - timedelta(days=2)), + # dummy_snapshot(now - timedelta(days=7)), + # dummy_snapshot(now - timedelta(days=12)), + # dummy_snapshot(now - timedelta(days=23)), + dummy_snapshot(now - timedelta(days=28)), + # dummy_snapshot(now - timedelta(days=32)), + # dummy_snapshot(now - timedelta(days=47)), + dummy_snapshot(now - timedelta(days=64)), + # dummy_snapshot(now - timedelta(days=84)), + dummy_snapshot(now - timedelta(days=104)), + dummy_snapshot(now - timedelta(days=365 * 2)), + ] + + +def test_autobackup_snapshots_pruning_yearly(backups): + snaps = [ + dummy_snapshot(datetime(year=2055, month=3, day=1)), + dummy_snapshot(datetime(year=2055, month=2, day=1)), + dummy_snapshot(datetime(year=2023, month=4, day=1)), + dummy_snapshot(datetime(year=2023, month=3, day=1)), + dummy_snapshot(datetime(year=2023, month=2, day=1)), + dummy_snapshot(datetime(year=2021, month=2, day=1)), + ] + quotas = copy(zero_quotas) + quotas.yearly = 2 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(datetime(year=2055, month=3, day=1)), + dummy_snapshot(datetime(year=2023, month=4, day=1)), + ] + + +def test_autobackup_snapshots_pruning_bottleneck(backups): + now = datetime(year=2023, month=1, day=25, hour=10) + snaps = [ + dummy_snapshot(now), + dummy_snapshot(now - timedelta(minutes=5)), + dummy_snapshot(now - timedelta(hours=2)), + dummy_snapshot(now - timedelta(hours=3)), + dummy_snapshot(now - timedelta(hours=4)), + ] + + yearly_quota = copy(zero_quotas) + yearly_quota.yearly = 2 + + monthly_quota = copy(zero_quotas) + monthly_quota.monthly = 2 + + weekly_quota = copy(zero_quotas) + weekly_quota.weekly = 2 + + daily_quota = copy(zero_quotas) + daily_quota.daily = 2 + + last_quota = copy(zero_quotas) + last_quota.last = 1 + last_quota.yearly = 2 + + for quota in [last_quota, yearly_quota, monthly_quota, weekly_quota, daily_quota]: + print(quota) + Backups.set_autobackup_quotas(quota) + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(now), + # If there is a vacant quota, we should keep the last snapshot even if it doesn't fit + dummy_snapshot(now - timedelta(hours=4)), + ] + + +def test_autobackup_snapshots_pruning_edgeweek(backups): + # jan 1 2023 is Sunday + snaps = [ + dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2023, month=1, day=1)), + dummy_snapshot(datetime(year=2022, month=12, day=31)), + dummy_snapshot(datetime(year=2022, month=12, day=30)), + ] + quotas = copy(zero_quotas) + quotas.weekly = 2 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2023, month=1, day=1)), + ] + + +def test_autobackup_snapshots_pruning_big_gap(backups): + snaps = [ + dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2023, month=1, day=2)), + dummy_snapshot(datetime(year=2022, month=10, day=31)), + dummy_snapshot(datetime(year=2022, month=10, day=30)), + ] + quotas = copy(zero_quotas) + quotas.weekly = 2 + Backups.set_autobackup_quotas(quotas) + + snaps_to_keep = Backups._prune_snaps_with_quotas(snaps) + assert snaps_to_keep == [ + dummy_snapshot(datetime(year=2023, month=1, day=6)), + dummy_snapshot(datetime(year=2022, month=10, day=31)), + ] + + +def 
test_quotas_exceeded_with_too_many_autobackups(backups, dummy_service): + assert Backups.autobackup_quotas() + quota = copy(zero_quotas) + quota.last = 2 + Backups.set_autobackup_quotas(quota) + assert Backups.autobackup_quotas().last == 2 + + snap = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 1 + snap2 = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 2 + snap3 = Backups.back_up(dummy_service, BackupReason.AUTO) + assert len(Backups.get_snapshots(dummy_service)) == 2 + + snaps = Backups.get_snapshots(dummy_service) + assert snap2 in snaps + assert snap3 in snaps + assert snap not in snaps + + quota.last = -1 + Backups.set_autobackup_quotas(quota) + snap4 = Backups.back_up(dummy_service, BackupReason.AUTO) + + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 3 + assert snap4 in snaps + + # Retroactivity + quota.last = 1 + Backups.set_autobackup_quotas(quota) + job = Jobs.add("trimming", "test.autobackup_trimming", "trimming the snaps!") + handle = prune_autobackup_snapshots(job) + handle(blocking=True) + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + + snap5 = Backups.back_up(dummy_service, BackupReason.AUTO) + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 1 + assert snap5 in snaps + + # Explicit snaps are not affected + snap6 = Backups.back_up(dummy_service, BackupReason.EXPLICIT) + + snaps = Backups.get_snapshots(dummy_service) + assert len(snaps) == 2 + assert snap5 in snaps + assert snap6 in snaps diff --git a/tests/test_graphql/test_backup.py b/tests/test_backup.py similarity index 76% rename from tests/test_graphql/test_backup.py rename to tests/test_backup.py index d54af7b..f343feb 100644 --- a/tests/test_graphql/test_backup.py +++ b/tests/test_backup.py @@ -1,23 +1,25 @@ import pytest + import os import os.path as path -from os import makedirs from os import remove from os import listdir from os import urandom -from datetime import datetime, timedelta, timezone -from subprocess import Popen +from datetime import datetime, timedelta, timezone import tempfile -import selfprivacy_api.services as services -from selfprivacy_api.services import Service, get_all_services +from selfprivacy_api.utils.huey import huey + + from selfprivacy_api.services.service import ServiceStatus -from selfprivacy_api.services import get_service_by_id -from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.graphql.queries.providers import BackupProvider -from selfprivacy_api.graphql.common_types.backup import RestoreStrategy +from selfprivacy_api.graphql.common_types.backup import ( + RestoreStrategy, + BackupReason, +) + from selfprivacy_api.jobs import Jobs, JobStatus from selfprivacy_api.models.backup.snapshot import Snapshot @@ -28,9 +30,6 @@ from selfprivacy_api.backup.providers import AbstractBackupProvider from selfprivacy_api.backup.providers.backblaze import Backblaze from selfprivacy_api.backup.providers.none import NoBackups from selfprivacy_api.backup.util import sync -from selfprivacy_api.backup.backuppers.restic_backupper import ResticBackupper -from selfprivacy_api.backup.jobs import add_backup_job, add_restore_job - from selfprivacy_api.backup.tasks import ( start_backup, @@ -38,16 +37,15 @@ from selfprivacy_api.backup.tasks import ( reload_snapshot_cache, ) from selfprivacy_api.backup.storage import Storage -from selfprivacy_api.backup.jobs import get_backup_job -TESTFILE_BODY = 
"testytest!" -TESTFILE_2_BODY = "testissimo!" REPO_NAME = "test_backup" +REPOFILE_NAME = "totallyunrelated" + def prepare_localfile_backups(temp_dir): - test_repo_path = path.join(temp_dir, "totallyunrelated") + test_repo_path = path.join(temp_dir, REPOFILE_NAME) assert not path.exists(test_repo_path) Backups.set_localfile_repo(test_repo_path) @@ -62,16 +60,24 @@ def backups_local(tmpdir): @pytest.fixture(scope="function") def backups(tmpdir): - # for those tests that are supposed to pass with any repo + """ + For those tests that are supposed to pass with + both local and cloud repos + """ + + # Sometimes this is false. Idk why. + huey.immediate = True + assert huey.immediate is True + Backups.reset() if BACKUP_PROVIDER_ENVS["kind"] in os.environ.keys(): Backups.set_provider_from_envs() else: prepare_localfile_backups(tmpdir) Jobs.reset() - # assert not repo_path Backups.init_repo() + assert Backups.provider().location == str(tmpdir) + "/" + REPOFILE_NAME yield Backups.erase_repo() @@ -81,45 +87,6 @@ def backups_backblaze(generic_userdata): Backups.reset(reset_json=False) -@pytest.fixture() -def raw_dummy_service(tmpdir): - dirnames = ["test_service", "also_test_service"] - service_dirs = [] - for d in dirnames: - service_dir = path.join(tmpdir, d) - makedirs(service_dir) - service_dirs.append(service_dir) - - testfile_path_1 = path.join(service_dirs[0], "testfile.txt") - with open(testfile_path_1, "w") as file: - file.write(TESTFILE_BODY) - - testfile_path_2 = path.join(service_dirs[1], "testfile2.txt") - with open(testfile_path_2, "w") as file: - file.write(TESTFILE_2_BODY) - - # we need this to not change get_folders() much - class TestDummyService(DummyService, folders=service_dirs): - pass - - service = TestDummyService() - return service - - -@pytest.fixture() -def dummy_service(tmpdir, backups, raw_dummy_service) -> Service: - service = raw_dummy_service - - # register our service - services.services.append(service) - - assert get_service_by_id(service.get_id()) is not None - yield service - - # cleanup because apparently it matters wrt tasks - services.services.remove(service) - - @pytest.fixture() def memory_backup() -> AbstractBackupProvider: ProviderClass = providers.get_provider(BackupProvider.MEMORY) @@ -242,16 +209,6 @@ def test_reinit_after_purge(backups): assert len(Backups.get_all_snapshots()) == 0 -def test_backup_simple_file(raw_dummy_service, file_backup): - # temporarily incomplete - service = raw_dummy_service - assert service is not None - assert file_backup is not None - - name = service.get_id() - file_backup.backupper.init() - - def test_backup_service(dummy_service, backups): id = dummy_service.get_id() assert_job_finished(f"services.{id}.backup", count=0) @@ -293,6 +250,16 @@ def test_backup_returns_snapshot(backups, dummy_service): assert Backups.get_snapshot_by_id(snapshot.id) is not None assert snapshot.service_name == name assert snapshot.created_at is not None + assert snapshot.reason == BackupReason.EXPLICIT + + +def test_backup_reasons(backups, dummy_service): + snap = Backups.back_up(dummy_service, BackupReason.AUTO) + assert snap.reason == BackupReason.AUTO + + Backups.force_snapshot_cache_reload() + snaps = Backups.get_snapshots(dummy_service) + assert snaps[0].reason == BackupReason.AUTO def folder_files(folder): @@ -404,7 +371,7 @@ def simulated_service_stopping_delay(request) -> float: def test_backup_service_task(backups, dummy_service, simulated_service_stopping_delay): dummy_service.set_delay(simulated_service_stopping_delay) - handle = 
@@ -404,7 +371,7 @@ def simulated_service_stopping_delay(request) -> float:
 def test_backup_service_task(backups, dummy_service, simulated_service_stopping_delay):
     dummy_service.set_delay(simulated_service_stopping_delay)

-    handle = start_backup(dummy_service)
+    handle = start_backup(dummy_service.get_id())
     handle(blocking=True)

     snaps = Backups.get_snapshots(dummy_service)
@@ -435,7 +402,10 @@ def test_forget_snapshot(backups, dummy_service):

 def test_forget_nonexistent_snapshot(backups, dummy_service):
     bogus = Snapshot(
-        id="gibberjibber", service_name="nohoho", created_at=datetime.now(timezone.utc)
+        id="gibberjibber",
+        service_name="nohoho",
+        created_at=datetime.now(timezone.utc),
+        reason=BackupReason.EXPLICIT,
     )
     with pytest.raises(ValueError):
         Backups.forget_snapshot(bogus)
@@ -446,7 +416,7 @@ def test_backup_larger_file(backups, dummy_service):
     mega = 2**20
     make_large_file(dir, 100 * mega)

-    handle = start_backup(dummy_service)
+    handle = start_backup(dummy_service.get_id())
     handle(blocking=True)

     # results will be slightly different on different machines. if someone has troubles with it on their machine, consider dropping this test.
@@ -508,120 +478,17 @@ def test_restore_snapshot_task(
     snaps = Backups.get_snapshots(dummy_service)
     if restore_strategy == RestoreStrategy.INPLACE:
         assert len(snaps) == 2
+        reasons = [snap.reason for snap in snaps]
+        assert BackupReason.PRE_RESTORE in reasons
     else:
         assert len(snaps) == 1

-def test_set_autobackup_period(backups):
-    assert Backups.autobackup_period_minutes() is None
-
-    Backups.set_autobackup_period_minutes(2)
-    assert Backups.autobackup_period_minutes() == 2
-
-    Backups.disable_all_autobackup()
-    assert Backups.autobackup_period_minutes() is None
-
-    Backups.set_autobackup_period_minutes(3)
-    assert Backups.autobackup_period_minutes() == 3
-
-    Backups.set_autobackup_period_minutes(0)
-    assert Backups.autobackup_period_minutes() is None
-
-    Backups.set_autobackup_period_minutes(3)
-    assert Backups.autobackup_period_minutes() == 3
-
-    Backups.set_autobackup_period_minutes(-1)
-    assert Backups.autobackup_period_minutes() is None
-
-
-def test_no_default_autobackup(backups, dummy_service):
-    now = datetime.now(timezone.utc)
-    assert not Backups.is_time_to_backup_service(dummy_service, now)
-    assert not Backups.is_time_to_backup(now)
-
-
-def backuppable_services() -> list[Service]:
-    return [service for service in get_all_services() if service.can_be_backed_up()]
-
-
-def test_services_to_back_up(backups, dummy_service):
-    backup_period = 13  # minutes
-    now = datetime.now(timezone.utc)
-
+def test_backup_unbackuppable(backups, dummy_service):
     dummy_service.set_backuppable(False)
-    services = Backups.services_to_back_up(now)
-    assert len(services) == 0
-
-    dummy_service.set_backuppable(True)
-
-    services = Backups.services_to_back_up(now)
-    assert len(services) == 0
-
-    Backups.set_autobackup_period_minutes(backup_period)
-
-    services = Backups.services_to_back_up(now)
-    assert len(services) == len(backuppable_services())
-    assert dummy_service.get_id() in [
-        service.get_id() for service in backuppable_services()
-    ]
-
-
-def test_autobackup_timer_periods(backups, dummy_service):
-    now = datetime.now(timezone.utc)
-    backup_period = 13  # minutes
-
-    assert not Backups.is_time_to_backup_service(dummy_service, now)
-    assert not Backups.is_time_to_backup(now)
-
-    Backups.set_autobackup_period_minutes(backup_period)
-    assert Backups.is_time_to_backup_service(dummy_service, now)
-    assert Backups.is_time_to_backup(now)
-
-    Backups.set_autobackup_period_minutes(0)
-    assert not Backups.is_time_to_backup_service(dummy_service, now)
-    assert not Backups.is_time_to_backup(now)
-
-
-def test_autobackup_timer_enabling(backups, dummy_service):
-    now = datetime.now(timezone.utc)
-    backup_period = 13  # minutes
-    dummy_service.set_backuppable(False)
-
-    Backups.set_autobackup_period_minutes(backup_period)
-    assert Backups.is_time_to_backup(
-        now
-    )  # there are other services too, not just our dummy
-
-    # not backuppable service is not backuppable even if period is set
-    assert not Backups.is_time_to_backup_service(dummy_service, now)
-
-    dummy_service.set_backuppable(True)
-    assert dummy_service.can_be_backed_up()
-    assert Backups.is_time_to_backup_service(dummy_service, now)
-
-    Backups.disable_all_autobackup()
-    assert not Backups.is_time_to_backup_service(dummy_service, now)
-    assert not Backups.is_time_to_backup(now)
-
-
-def test_autobackup_timing(backups, dummy_service):
-    backup_period = 13  # minutes
-    now = datetime.now(timezone.utc)
-
-    Backups.set_autobackup_period_minutes(backup_period)
-    assert Backups.is_time_to_backup_service(dummy_service, now)
-    assert Backups.is_time_to_backup(now)
-
-    Backups.back_up(dummy_service)
-
-    now = datetime.now(timezone.utc)
-    assert not Backups.is_time_to_backup_service(dummy_service, now)
-
-    past = datetime.now(timezone.utc) - timedelta(minutes=1)
-    assert not Backups.is_time_to_backup_service(dummy_service, past)
-
-    future = datetime.now(timezone.utc) + timedelta(minutes=backup_period + 2)
-    assert Backups.is_time_to_backup_service(dummy_service, future)
+    assert dummy_service.can_be_backed_up() is False
+    with pytest.raises(ValueError):
+        Backups.back_up(dummy_service)


 # Storage
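Note the signature change in the task tests above: `start_backup` is now called with `dummy_service.get_id()` rather than the service object itself. Task arguments are serialized when they cross the queue, so passing a plain string id is safer than passing a live object. A sketch of the pattern (hypothetical body; the real task lives in `selfprivacy_api.backup.tasks`):

```python
# Sketch: queue tasks should take serializable arguments (here, a str id)
# and resolve heavyweight objects inside the task body.
@huey.task()
def start_backup(service_id: str):
    service = get_service_by_id(service_id)  # look up the live object here
    if service is None:
        raise ValueError(f"no such service: {service_id}")
    Backups.back_up(service)
```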
devices_by_name["sdb"] + + assert sda1.name == "sda1" + assert sda1.path == "/dev/sda1" + assert sda1.fsavail == "4605702144" + assert sda1.fssize == "19814920192" + assert sda1.fstype == "ext4" + assert sda1.fsused == "14353719296" + assert sda1.mountpoints == ["/nix/store", "/"] + assert sda1.label is None + assert sda1.uuid == "ec80c004-baec-4a2c-851d-0e1807135511" + assert sda1.size == "20210236928" + assert sda1.model is None + assert sda1.serial is None + assert sda1.type == "part" + + assert sdb.name == "sdb" + assert sdb.path == "/dev/sdb" + assert sdb.fsavail == "11888545792" + assert sdb.fssize == "12573614080" + assert sdb.fstype == "ext4" + assert sdb.fsused == "24047616" + assert sdb.mountpoints == ["/volumes/sdb"] + assert sdb.label is None + assert sdb.uuid == "fa9d0026-ee23-4047-b8b1-297ae16fa751" + assert sdb.size == "12884901888" + assert sdb.model == "Volume" + assert sdb.serial == "21378102" + assert sdb.type == "disk" def test_get_block_device(lsblk_full_mock, authorized_client): @@ -506,3 +511,30 @@ def test_get_root_block_device(lsblk_full_mock, authorized_client): assert block_device.model is None assert block_device.serial is None assert block_device.type == "part" + + +# Unassuming sanity check, yes this did fail +def test_get_real_devices(): + block_devices = BlockDevices().get_block_devices() + + assert block_devices is not None + assert len(block_devices) > 0 + + +# Unassuming sanity check +def test_get_real_root_device(): + devices = BlockDevices().get_block_devices() + try: + block_device = BlockDevices().get_root_block_device() + except Exception as e: + raise Exception("cannot get root device:", e, "devices found:", devices) + assert block_device is not None + assert block_device.name is not None + assert block_device.name != "" + + +def test_get_real_root_device_raw(authorized_client): + block_device = BlockDevices().get_root_block_device() + assert block_device is not None + assert block_device.name is not None + assert block_device.name != "" diff --git a/tests/test_block_device_utils/no_devices.json b/tests/test_block_device_utils/no_devices.json index c395b21..b23d99f 100644 --- a/tests/test_block_device_utils/no_devices.json +++ b/tests/test_block_device_utils/no_devices.json @@ -1,59 +1,59 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "resticPassword": "PASS", + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - 
"enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - }, - "volumes": [ - ] + } } diff --git a/tests/test_block_device_utils/only_root.json b/tests/test_block_device_utils/only_root.json index 1026ed0..ab4a196 100644 --- a/tests/test_block_device_utils/only_root.json +++ b/tests/test_block_device_utils/only_root.json @@ -1,64 +1,65 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "resticPassword": "PASS", + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [ + { + "device": "/dev/sda1", + "mountPoint": "/volumes/sda1", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "volumes": [ - { - "device": "/dev/sda1", - "mountPoint": "/volumes/sda1", - "filesystem": "ext4" - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_block_device_utils/undefined.json b/tests/test_block_device_utils/undefined.json index f5edda8..21acd70 100644 --- a/tests/test_block_device_utils/undefined.json +++ b/tests/test_block_device_utils/undefined.json @@ -1,57 +1,58 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": { + "enable": true, + "allowReboot": true + }, + "modules": { + "bitwarden": { + "enable": true, + 
"location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } }, - "resticPassword": "PASS", "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_common.py b/tests/test_common.py index e5d3f62..7dd3652 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,6 +1,5 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument -import json import os import pytest diff --git a/tests/test_dkim.py b/tests/test_dkim.py new file mode 100644 index 0000000..0adf0a7 --- /dev/null +++ b/tests/test_dkim.py @@ -0,0 +1,52 @@ +import pytest + +import os +from os import path +from tests.conftest import global_data_dir + +from selfprivacy_api.utils import get_dkim_key, get_domain + +############################################################################### + +DKIM_FILE_CONTENT = b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector for test-domain.tld\n' + + +@pytest.fixture +def dkim_file(mocker, tmpdir, generic_userdata): + domain = get_domain() + assert domain is not None + assert domain != "" + + filename = domain + ".selector.txt" + dkim_path = path.join(tmpdir, filename) + + with open(dkim_path, "wb") as file: + file.write(DKIM_FILE_CONTENT) + + mocker.patch("selfprivacy_api.utils.DKIM_DIR", tmpdir) + return dkim_path + + +@pytest.fixture +def no_dkim_file(dkim_file): + os.remove(dkim_file) + assert path.exists(dkim_file) is False + return dkim_file + + +############################################################################### + + +def test_get_dkim_key(dkim_file): + """Test DKIM key""" + dkim_key = get_dkim_key("test-domain.tld") + assert ( + dkim_key + == "v=DKIM1; k=rsa; p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" + ) + + +def test_no_dkim_key(no_dkim_file): + """Test no DKIM key""" + dkim_key = get_dkim_key("test-domain.tld") + assert dkim_key is None diff --git a/tests/test_graphql/common.py b/tests/test_graphql/common.py new file mode 100644 index 0000000..5e6dc04 --- /dev/null +++ b/tests/test_graphql/common.py @@ -0,0 +1,96 @@ +from tests.common import generate_api_query +from tests.conftest import TOKENS_FILE_CONTENTS, DEVICE_WE_AUTH_TESTS_WITH + +ORIGINAL_DEVICES = TOKENS_FILE_CONTENTS["tokens"] + + +def assert_ok(output: dict, code=200) -> None: + if output["success"] is False: + # convenience for 
debugging, this should display error + # if message is empty, consider adding helpful messages + raise ValueError(output["code"], output["message"]) + assert output["success"] is True + assert output["message"] is not None + assert output["code"] == code + + +def assert_errorcode(output: dict, code) -> None: + assert output["success"] is False + assert output["message"] is not None + assert output["code"] == code + + +def assert_empty(response): + assert response.status_code == 200 + assert response.json().get("data") is None + + +def get_data(response): + assert response.status_code == 200 + response = response.json() + + if ( + "errors" in response.keys() + ): # convenience for debugging, this will display error + raise ValueError(response["errors"]) + data = response.get("data") + assert data is not None + return data + + +API_DEVICES_QUERY = """ +devices { + creationDate + isCaller + name +} +""" + + +def request_devices(client): + return client.post( + "/graphql", + json={"query": generate_api_query([API_DEVICES_QUERY])}, + ) + + +def graphql_get_devices(client): + response = request_devices(client) + data = get_data(response) + devices = data["api"]["devices"] + assert devices is not None + return devices + + +def set_client_token(client, token): + client.headers.update({"Authorization": "Bearer " + token}) + + +def assert_token_valid(client, token): + set_client_token(client, token) + assert graphql_get_devices(client) is not None + + +def assert_same(graphql_devices, abstract_devices): + """Orderless comparison""" + assert len(graphql_devices) == len(abstract_devices) + for original_device in abstract_devices: + assert original_device["name"] in [device["name"] for device in graphql_devices] + for device in graphql_devices: + if device["name"] == original_device["name"]: + assert device["creationDate"] == original_device["date"].isoformat() + + +def assert_original(client): + devices = graphql_get_devices(client) + assert_original_devices(devices) + + +def assert_original_devices(devices): + assert_same(devices, ORIGINAL_DEVICES) + + for device in devices: + if device["name"] == DEVICE_WE_AUTH_TESTS_WITH["name"]: + assert device["isCaller"] is True + else: + assert device["isCaller"] is False diff --git a/selfprivacy_api/rest/__init__.py b/tests/test_graphql/data/gitkeep similarity index 100% rename from selfprivacy_api/rest/__init__.py rename to tests/test_graphql/data/gitkeep diff --git a/tests/test_graphql/data/tokens.json b/tests/test_graphql/data/tokens.json deleted file mode 100644 index 9be9d02..0000000 --- a/tests/test_graphql/data/tokens.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314" - }, - { - "token": "TEST_TOKEN2", - "name": "test_token2", - "date": "2022-01-14 08:31:10.789314" - } - ] -} \ No newline at end of file diff --git a/tests/test_graphql/test_api.py b/tests/test_graphql/test_api.py index 16c7c4d..af04685 100644 --- a/tests/test_graphql/test_api.py +++ b/tests/test_graphql/test_api.py @@ -3,27 +3,13 @@ # pylint: disable=missing-function-docstring from tests.common import generate_api_query +from tests.test_graphql.common import assert_original_devices from tests.test_graphql.test_api_devices import API_DEVICES_QUERY from tests.test_graphql.test_api_recovery import API_RECOVERY_QUERY from tests.test_graphql.test_api_version import API_VERSION_QUERY -TOKENS_FILE_CONTETS = { - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 
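These helpers centralize the response plumbing that the GraphQL tests below used to repeat inline. The intended call pattern looks roughly like this (a sketch; `authorized_client` is the fixture the tests already use):

```python
# Sketch: typical usage of the shared helpers defined above.
response = request_devices(authorized_client)
data = get_data(response)               # raises with the GraphQL errors, if any
devices = data["api"]["devices"]
assert_same(devices, ORIGINAL_DEVICES)  # orderless comparison by name and date
```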
08:31:10.789314", - }, - { - "token": "TEST_TOKEN2", - "name": "test_token2", - "date": "2022-01-14 08:31:10.789314", - }, - ] -} - -def test_graphql_get_entire_api_data(authorized_client, tokens_file): +def test_graphql_get_entire_api_data(authorized_client): response = authorized_client.post( "/graphql", json={ @@ -35,20 +21,11 @@ def test_graphql_get_entire_api_data(authorized_client, tokens_file): assert response.status_code == 200 assert response.json().get("data") is not None assert "version" in response.json()["data"]["api"] - assert response.json()["data"]["api"]["devices"] is not None - assert len(response.json()["data"]["api"]["devices"]) == 2 - assert ( - response.json()["data"]["api"]["devices"][0]["creationDate"] - == "2022-01-14T08:31:10.789314" - ) - assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True - assert response.json()["data"]["api"]["devices"][0]["name"] == "test_token" - assert ( - response.json()["data"]["api"]["devices"][1]["creationDate"] - == "2022-01-14T08:31:10.789314" - ) - assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False - assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2" + + devices = response.json()["data"]["api"]["devices"] + assert devices is not None + assert_original_devices(devices) + assert response.json()["data"]["api"]["recoveryKey"] is not None assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False diff --git a/tests/test_graphql/test_api_backup.py b/tests/test_graphql/test_api_backup.py index e53ce2a..18d5d15 100644 --- a/tests/test_graphql/test_api_backup.py +++ b/tests/test_graphql/test_api_backup.py @@ -1,9 +1,13 @@ from os import path -from tests.test_graphql.test_backup import dummy_service, backups, raw_dummy_service +from tests.test_backup import backups from tests.common import generate_backup_query from selfprivacy_api.graphql.common_types.service import service_to_graphql_service +from selfprivacy_api.graphql.common_types.backup import ( + _AutobackupQuotas, + AutobackupQuotas, +) from selfprivacy_api.jobs import Jobs, JobStatus API_RELOAD_SNAPSHOTS = """ @@ -38,6 +42,34 @@ mutation TestAutobackupPeriod($period: Int) { } """ + +API_SET_AUTOBACKUP_QUOTAS_MUTATION = """ +mutation TestAutobackupQuotas($input: AutobackupQuotasInput!) 
{ + backup { + setAutobackupQuotas(quotas: $input) { + success + message + code + configuration { + provider + encryptionKey + isInitialized + autobackupPeriod + locationName + locationId + autobackupQuotas { + last + daily + weekly + monthly + yearly + } + } + } + } +} +""" + API_REMOVE_REPOSITORY_MUTATION = """ mutation TestRemoveRepo { backup { @@ -113,6 +145,7 @@ allSnapshots { id } createdAt + reason } """ @@ -177,6 +210,17 @@ def api_set_period(authorized_client, period): return response +def api_set_quotas(authorized_client, quotas: _AutobackupQuotas): + response = authorized_client.post( + "/graphql", + json={ + "query": API_SET_AUTOBACKUP_QUOTAS_MUTATION, + "variables": {"input": quotas.dict()}, + }, + ) + return response + + def api_remove(authorized_client): response = authorized_client.post( "/graphql", @@ -221,6 +265,10 @@ def api_init_without_key( def assert_ok(data): + if data["success"] is False: + # convenience for debugging, this should display error + # if empty, consider adding helpful messages + raise ValueError(data["code"], data["message"]) assert data["code"] == 200 assert data["success"] is True @@ -231,7 +279,7 @@ def get_data(response): if ( "errors" in response.keys() ): # convenience for debugging, this will display error - assert response["errors"] == [] + raise ValueError(response["errors"]) assert response["data"] is not None data = response["data"] return data @@ -253,12 +301,12 @@ def test_dummy_service_convertible_to_gql(dummy_service): assert gql_service is not None -def test_snapshots_empty(authorized_client, dummy_service): +def test_snapshots_empty(authorized_client, dummy_service, backups): snaps = api_snapshots(authorized_client) assert snaps == [] -def test_start_backup(authorized_client, dummy_service): +def test_start_backup(authorized_client, dummy_service, backups): response = api_backup(authorized_client, dummy_service) data = get_data(response)["backup"]["startBackup"] assert data["success"] is True @@ -274,7 +322,7 @@ def test_start_backup(authorized_client, dummy_service): assert snap["service"]["id"] == "testservice" -def test_restore(authorized_client, dummy_service): +def test_restore(authorized_client, dummy_service, backups): api_backup(authorized_client, dummy_service) snap = api_snapshots(authorized_client)[0] assert snap["id"] is not None @@ -287,7 +335,7 @@ def test_restore(authorized_client, dummy_service): assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED -def test_reinit(authorized_client, dummy_service, tmpdir): +def test_reinit(authorized_client, dummy_service, tmpdir, backups): test_repo_path = path.join(tmpdir, "not_at_all_sus") response = api_init_without_key( authorized_client, "FILE", "", "", test_repo_path, "" @@ -309,7 +357,7 @@ def test_reinit(authorized_client, dummy_service, tmpdir): assert Jobs.get_job(job["uid"]).status == JobStatus.FINISHED -def test_remove(authorized_client, generic_userdata): +def test_remove(authorized_client, generic_userdata, backups): response = api_remove(authorized_client) data = get_data(response)["backup"]["removeRepository"] assert_ok(data) @@ -323,7 +371,23 @@ def test_remove(authorized_client, generic_userdata): assert configuration["isInitialized"] is False -def test_autobackup_period_nonzero(authorized_client): +def test_autobackup_quotas_nonzero(authorized_client, backups): + quotas = _AutobackupQuotas( + last=3, + daily=2, + weekly=4, + monthly=13, + yearly=14, + ) + response = api_set_quotas(authorized_client, quotas) + data = 
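`api_set_quotas` feeds an `_AutobackupQuotas` value straight into the mutation's `$input` variable. The wire-level payload it posts looks roughly like this (sketch with illustrative numbers):

```python
# Sketch: the JSON body produced by api_set_quotas(...).
payload = {
    "query": API_SET_AUTOBACKUP_QUOTAS_MUTATION,
    "variables": {
        "input": {
            "last": 3,
            "daily": 2,
            "weekly": 4,
            "monthly": 13,
            "yearly": 14,
        }
    },
}
response = authorized_client.post("/graphql", json=payload)
```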
get_data(response)["backup"]["setAutobackupQuotas"] + assert_ok(data) + + configuration = data["configuration"] + assert configuration["autobackupQuotas"] == quotas + + +def test_autobackup_period_nonzero(authorized_client, backups): new_period = 11 response = api_set_period(authorized_client, new_period) data = get_data(response)["backup"]["setAutobackupPeriod"] @@ -333,7 +397,7 @@ def test_autobackup_period_nonzero(authorized_client): assert configuration["autobackupPeriod"] == new_period -def test_autobackup_period_zero(authorized_client): +def test_autobackup_period_zero(authorized_client, backups): new_period = 0 # since it is none by default, we better first set it to something non-negative response = api_set_period(authorized_client, 11) @@ -346,7 +410,7 @@ def test_autobackup_period_zero(authorized_client): assert configuration["autobackupPeriod"] == None -def test_autobackup_period_none(authorized_client): +def test_autobackup_period_none(authorized_client, backups): # since it is none by default, we better first set it to something non-negative response = api_set_period(authorized_client, 11) # and now we nullify it @@ -358,7 +422,7 @@ def test_autobackup_period_none(authorized_client): assert configuration["autobackupPeriod"] == None -def test_autobackup_period_negative(authorized_client): +def test_autobackup_period_negative(authorized_client, backups): # since it is none by default, we better first set it to something non-negative response = api_set_period(authorized_client, 11) # and now we nullify it @@ -372,7 +436,7 @@ def test_autobackup_period_negative(authorized_client): # We cannot really check the effect at this level, we leave it to backend tests # But we still make it run in both empty and full scenarios and ask for snaps afterwards -def test_reload_snapshots_bare_bare_bare(authorized_client, dummy_service): +def test_reload_snapshots_bare_bare_bare(authorized_client, dummy_service, backups): api_remove(authorized_client) response = api_reload_snapshots(authorized_client) @@ -383,7 +447,7 @@ def test_reload_snapshots_bare_bare_bare(authorized_client, dummy_service): assert snaps == [] -def test_reload_snapshots(authorized_client, dummy_service): +def test_reload_snapshots(authorized_client, dummy_service, backups): response = api_backup(authorized_client, dummy_service) data = get_data(response)["backup"]["startBackup"] @@ -395,7 +459,7 @@ def test_reload_snapshots(authorized_client, dummy_service): assert len(snaps) == 1 -def test_forget_snapshot(authorized_client, dummy_service): +def test_forget_snapshot(authorized_client, dummy_service, backups): response = api_backup(authorized_client, dummy_service) data = get_data(response)["backup"]["startBackup"] @@ -410,7 +474,7 @@ def test_forget_snapshot(authorized_client, dummy_service): assert len(snaps) == 0 -def test_forget_nonexistent_snapshot(authorized_client, dummy_service): +def test_forget_nonexistent_snapshot(authorized_client, dummy_service, backups): snaps = api_snapshots(authorized_client) assert len(snaps) == 0 response = api_forget(authorized_client, "898798uekiodpjoiweoiwuoeirueor") diff --git a/tests/test_graphql/test_api_devices.py b/tests/test_graphql/test_api_devices.py index cd76ef7..d521861 100644 --- a/tests/test_graphql/test_api_devices.py +++ b/tests/test_graphql/test_api_devices.py @@ -1,76 +1,78 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=missing-function-docstring -import datetime -import pytest -from mnemonic import Mnemonic - -from 
selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, +from tests.common import ( + RECOVERY_KEY_VALIDATION_DATETIME, + DEVICE_KEY_VALIDATION_DATETIME, + NearFuture, + generate_api_query, +) +from tests.conftest import DEVICE_WE_AUTH_TESTS_WITH +from tests.test_graphql.common import ( + get_data, + assert_empty, + assert_ok, + assert_errorcode, + assert_token_valid, + assert_original, + assert_same, + graphql_get_devices, + request_devices, + set_client_token, + API_DEVICES_QUERY, + ORIGINAL_DEVICES, ) -from selfprivacy_api.models.tokens.token import Token - -from tests.common import generate_api_query, read_json, write_json - -TOKENS_FILE_CONTETS = { - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314", - }, - { - "token": "TEST_TOKEN2", - "name": "test_token2", - "date": "2022-01-14 08:31:10.789314", - }, - ] -} - -API_DEVICES_QUERY = """ -devices { - creationDate - isCaller - name -} -""" -@pytest.fixture -def token_repo(): - return JsonTokensRepository() +def graphql_get_caller_token_info(client): + devices = graphql_get_devices(client) + for device in devices: + if device["isCaller"] is True: + return device -def test_graphql_tokens_info(authorized_client, tokens_file): +def graphql_get_new_device_key(authorized_client) -> str: response = authorized_client.post( "/graphql", - json={"query": generate_api_query([API_DEVICES_QUERY])}, + json={"query": NEW_DEVICE_KEY_MUTATION}, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["devices"] is not None - assert len(response.json()["data"]["api"]["devices"]) == 2 - assert ( - response.json()["data"]["api"]["devices"][0]["creationDate"] - == "2022-01-14T08:31:10.789314" - ) - assert response.json()["data"]["api"]["devices"][0]["isCaller"] is True - assert response.json()["data"]["api"]["devices"][0]["name"] == "test_token" - assert ( - response.json()["data"]["api"]["devices"][1]["creationDate"] - == "2022-01-14T08:31:10.789314" - ) - assert response.json()["data"]["api"]["devices"][1]["isCaller"] is False - assert response.json()["data"]["api"]["devices"][1]["name"] == "test_token2" + assert_ok(get_data(response)["api"]["getNewDeviceApiKey"]) + + key = response.json()["data"]["api"]["getNewDeviceApiKey"]["key"] + assert key.split(" ").__len__() == 12 + return key -def test_graphql_tokens_info_unauthorized(client, tokens_file): - response = client.post( +def graphql_try_auth_new_device(client, mnemonic_key, device_name): + return client.post( "/graphql", - json={"query": generate_api_query([API_DEVICES_QUERY])}, + json={ + "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION, + "variables": { + "input": { + "key": mnemonic_key, + "deviceName": device_name, + } + }, + }, ) - assert response.status_code == 200 - assert response.json()["data"] is None + + +def graphql_authorize_new_device(client, mnemonic_key, device_name) -> str: + response = graphql_try_auth_new_device(client, mnemonic_key, "new_device") + assert_ok(get_data(response)["api"]["authorizeWithNewDeviceApiKey"]) + token = response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["token"] + assert_token_valid(client, token) + return token + + +def test_graphql_tokens_info(authorized_client): + assert_original(authorized_client) + + +def test_graphql_tokens_info_unauthorized(client): + response = request_devices(client) + assert_empty(response) DELETE_TOKEN_MUTATION = """ @@ -86,7 +88,7 @@ mutation DeleteToken($device: 
String!) { """ -def test_graphql_delete_token_unauthorized(client, tokens_file): +def test_graphql_delete_token_unauthorized(client): response = client.post( "/graphql", json={ @@ -96,57 +98,45 @@ def test_graphql_delete_token_unauthorized(client, tokens_file): }, }, ) - assert response.status_code == 200 - assert response.json()["data"] is None + assert_empty(response) -def test_graphql_delete_token(authorized_client, tokens_file): +def test_graphql_delete_token(authorized_client): + test_devices = ORIGINAL_DEVICES.copy() + device_to_delete = test_devices.pop(1) + assert device_to_delete != DEVICE_WE_AUTH_TESTS_WITH + response = authorized_client.post( "/graphql", json={ "query": DELETE_TOKEN_MUTATION, "variables": { - "device": "test_token2", + "device": device_to_delete["name"], }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["deleteDeviceApiToken"]["success"] is True - assert response.json()["data"]["api"]["deleteDeviceApiToken"]["message"] is not None - assert response.json()["data"]["api"]["deleteDeviceApiToken"]["code"] == 200 - assert read_json(tokens_file) == { - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314", - } - ] - } + assert_ok(get_data(response)["api"]["deleteDeviceApiToken"]) + + devices = graphql_get_devices(authorized_client) + assert_same(devices, test_devices) -def test_graphql_delete_self_token(authorized_client, tokens_file): +def test_graphql_delete_self_token(authorized_client): response = authorized_client.post( "/graphql", json={ "query": DELETE_TOKEN_MUTATION, "variables": { - "device": "test_token", + "device": DEVICE_WE_AUTH_TESTS_WITH["name"], }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["deleteDeviceApiToken"]["success"] is False - assert response.json()["data"]["api"]["deleteDeviceApiToken"]["message"] is not None - assert response.json()["data"]["api"]["deleteDeviceApiToken"]["code"] == 400 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_errorcode(get_data(response)["api"]["deleteDeviceApiToken"], 400) + assert_original(authorized_client) def test_graphql_delete_nonexistent_token( authorized_client, - tokens_file, ): response = authorized_client.post( "/graphql", @@ -157,12 +147,9 @@ def test_graphql_delete_nonexistent_token( }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["deleteDeviceApiToken"]["success"] is False - assert response.json()["data"]["api"]["deleteDeviceApiToken"]["message"] is not None - assert response.json()["data"]["api"]["deleteDeviceApiToken"]["code"] == 404 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS + assert_errorcode(get_data(response)["api"]["deleteDeviceApiToken"], 404) + + assert_original(authorized_client) REFRESH_TOKEN_MUTATION = """ @@ -179,37 +166,27 @@ mutation RefreshToken { """ -def test_graphql_refresh_token_unauthorized(client, tokens_file): +def test_graphql_refresh_token_unauthorized(client): response = client.post( "/graphql", json={"query": REFRESH_TOKEN_MUTATION}, ) - assert response.status_code == 200 - assert response.json()["data"] is None + assert_empty(response) -def test_graphql_refresh_token( - authorized_client, - tokens_file, - token_repo, -): +def test_graphql_refresh_token(authorized_client, client): + caller_name_and_date = 
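The device-key helpers defined near the top of this file assert that a new device key is a 12-word phrase. That is a BIP-39-style mnemonic; the `mnemonic` package (already a project dependency) converts between the phrase and its entropy. A sketch of the round trip:

```python
# Sketch: a 12-word mnemonic encodes 16 bytes (128 bits) of entropy.
from mnemonic import Mnemonic

mnemo = Mnemonic(language="english")
phrase = mnemo.generate(strength=128)   # 12 words
assert len(phrase.split(" ")) == 12
entropy = mnemo.to_entropy(phrase)      # the bytes the server stores/compares
assert mnemo.to_mnemonic(bytes(entropy)) == phrase
```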
-def test_graphql_refresh_token(
-    authorized_client,
-    tokens_file,
-    token_repo,
-):
+def test_graphql_refresh_token(authorized_client, client):
+    caller_name_and_date = graphql_get_caller_token_info(authorized_client)
     response = authorized_client.post(
         "/graphql",
         json={"query": REFRESH_TOKEN_MUTATION},
     )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["refreshDeviceApiToken"]["success"] is True
-    assert (
-        response.json()["data"]["api"]["refreshDeviceApiToken"]["message"] is not None
-    )
-    assert response.json()["data"]["api"]["refreshDeviceApiToken"]["code"] == 200
-    token = token_repo.get_token_by_name("test_token")
-    assert token == Token(
-        token=response.json()["data"]["api"]["refreshDeviceApiToken"]["token"],
-        device_name="test_token",
-        created_at=datetime.datetime(2022, 1, 14, 8, 31, 10, 789314),
-    )
+    assert_ok(get_data(response)["api"]["refreshDeviceApiToken"])
+
+    new_token = response.json()["data"]["api"]["refreshDeviceApiToken"]["token"]
+    assert_token_valid(client, new_token)
+
+    set_client_token(client, new_token)
+    assert graphql_get_caller_token_info(client) == caller_name_and_date


 NEW_DEVICE_KEY_MUTATION = """
 mutation NewDeviceKey {
@@ -228,39 +205,12 @@ mutation NewDeviceKey {

 def test_graphql_get_new_device_auth_key_unauthorized(
     client,
-    tokens_file,
 ):
     response = client.post(
         "/graphql",
         json={"query": NEW_DEVICE_KEY_MUTATION},
     )
-    assert response.status_code == 200
-    assert response.json()["data"] is None
-
-
-def test_graphql_get_new_device_auth_key(
-    authorized_client,
-    tokens_file,
-):
-    response = authorized_client.post(
-        "/graphql",
-        json={"query": NEW_DEVICE_KEY_MUTATION},
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200
-    assert (
-        response.json()["data"]["api"]["getNewDeviceApiKey"]["key"].split(" ").__len__()
-        == 12
-    )
-    token = (
-        Mnemonic(language="english")
-        .to_entropy(response.json()["data"]["api"]["getNewDeviceApiKey"]["key"])
-        .hex()
-    )
-    assert read_json(tokens_file)["new_device"]["token"] == token
+    assert_empty(response)


 INVALIDATE_NEW_DEVICE_KEY_MUTATION = """
@@ -278,7 +228,6 @@ mutation InvalidateNewDeviceKey {

 def test_graphql_invalidate_new_device_token_unauthorized(
     client,
-    tokens_file,
 ):
     response = client.post(
         "/graphql",
@@ -289,48 +238,20 @@ def test_graphql_invalidate_new_device_token_unauthorized(
             },
         },
     )
-    assert response.status_code == 200
-    assert response.json()["data"] is None
+    assert_empty(response)


-def test_graphql_get_and_delete_new_device_key(
-    authorized_client,
-    tokens_file,
-):
-    response = authorized_client.post(
-        "/graphql",
-        json={"query": NEW_DEVICE_KEY_MUTATION},
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200
-    assert (
-        response.json()["data"]["api"]["getNewDeviceApiKey"]["key"].split(" ").__len__()
-        == 12
-    )
-    token = (
-        Mnemonic(language="english")
-        .to_entropy(response.json()["data"]["api"]["getNewDeviceApiKey"]["key"])
-        .hex()
-    )
-    assert read_json(tokens_file)["new_device"]["token"] == token
+def test_graphql_get_and_delete_new_device_key(client, authorized_client):
+    mnemonic_key = graphql_get_new_device_key(authorized_client)
+
     response = authorized_client.post(
         "/graphql",
         json={"query": INVALIDATE_NEW_DEVICE_KEY_MUTATION},
     )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert (
-        response.json()["data"]["api"]["invalidateNewDeviceApiKey"]["success"] is True
-    )
-    assert (
-        response.json()["data"]["api"]["invalidateNewDeviceApiKey"]["message"]
-        is not None
-    )
-    assert response.json()["data"]["api"]["invalidateNewDeviceApiKey"]["code"] == 200
-    assert read_json(tokens_file) == TOKENS_FILE_CONTETS
+    assert_ok(get_data(response)["api"]["invalidateNewDeviceApiKey"])
+
+    response = graphql_try_auth_new_device(client, mnemonic_key, "new_device")
+    assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404)


 AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION = """
@@ -347,214 +268,48 @@ mutation AuthorizeWithNewDeviceKey($input: UseNewDeviceKeyInput!) {
 """


-def test_graphql_get_and_authorize_new_device(
-    client,
-    authorized_client,
-    tokens_file,
-):
-    response = authorized_client.post(
-        "/graphql",
-        json={"query": NEW_DEVICE_KEY_MUTATION},
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200
-    mnemonic_key = response.json()["data"]["api"]["getNewDeviceApiKey"]["key"]
-    assert mnemonic_key.split(" ").__len__() == 12
-    key = Mnemonic(language="english").to_entropy(mnemonic_key).hex()
-    assert read_json(tokens_file)["new_device"]["token"] == key
-    response = client.post(
-        "/graphql",
-        json={
-            "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION,
-            "variables": {
-                "input": {
-                    "key": mnemonic_key,
-                    "deviceName": "new_device",
-                }
-            },
-        },
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert (
-        response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"]
-        is True
-    )
-    assert (
-        response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"]
-        is not None
-    )
-    assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 200
-    token = response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["token"]
-    assert read_json(tokens_file)["tokens"][2]["token"] == token
-    assert read_json(tokens_file)["tokens"][2]["name"] == "new_device"
+def test_graphql_get_and_authorize_new_device(client, authorized_client):
+    mnemonic_key = graphql_get_new_device_key(authorized_client)
+    old_devices = graphql_get_devices(authorized_client)
+
+    graphql_authorize_new_device(client, mnemonic_key, "new_device")
+    new_devices = graphql_get_devices(authorized_client)
+
+    assert len(new_devices) == len(old_devices) + 1
+    assert "new_device" in [device["name"] for device in new_devices]


-def test_graphql_authorize_new_device_with_invalid_key(
-    client,
-    tokens_file,
-):
-    response = client.post(
-        "/graphql",
-        json={
-            "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION,
-            "variables": {
-                "input": {
-                    "key": "invalid_token",
-                    "deviceName": "test_token",
-                }
-            },
-        },
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert (
-        response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"]
-        is False
-    )
-    assert (
-        response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"]
-        is not None
-    )
-    assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 404
-    assert read_json(tokens_file) == TOKENS_FILE_CONTETS
+def test_graphql_authorize_new_device_with_invalid_key(client, authorized_client):
+    response = graphql_try_auth_new_device(client, "invalid_token", "new_device")
+    assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404)
+
+    assert_original(authorized_client)


-def test_graphql_get_and_authorize_used_key(
-    client,
-    authorized_client,
-    tokens_file,
-):
-    response = authorized_client.post(
-        "/graphql",
-        json={"query": NEW_DEVICE_KEY_MUTATION},
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200
-    mnemonic_key = response.json()["data"]["api"]["getNewDeviceApiKey"]["key"]
-    assert mnemonic_key.split(" ").__len__() == 12
-    key = Mnemonic(language="english").to_entropy(mnemonic_key).hex()
-    assert read_json(tokens_file)["new_device"]["token"] == key
-    response = client.post(
-        "/graphql",
-        json={
-            "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION,
-            "variables": {
-                "input": {
-                    "key": mnemonic_key,
-                    "deviceName": "new_token",
-                }
-            },
-        },
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert (
-        response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"]
-        is True
-    )
-    assert (
-        response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"]
-        is not None
-    )
-    assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 200
-    assert (
-        read_json(tokens_file)["tokens"][2]["token"]
-        == response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["token"]
-    )
-    assert read_json(tokens_file)["tokens"][2]["name"] == "new_token"
+def test_graphql_get_and_authorize_used_key(client, authorized_client):
+    mnemonic_key = graphql_get_new_device_key(authorized_client)

-    response = client.post(
-        "/graphql",
-        json={
-            "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION,
-            "variables": {
-                "input": {
-                    "key": NEW_DEVICE_KEY_MUTATION,
-                    "deviceName": "test_token2",
-                }
-            },
-        },
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert (
-        response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"]
-        is False
-    )
-    assert (
-        response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"]
-        is not None
-    )
-    assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 404
-    assert read_json(tokens_file)["tokens"].__len__() == 3
+    graphql_authorize_new_device(client, mnemonic_key, "new_device")
+    devices = graphql_get_devices(authorized_client)
+
+    response = graphql_try_auth_new_device(client, mnemonic_key, "new_device2")
+    assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404)
+
+    assert graphql_get_devices(authorized_client) == devices


 def test_graphql_get_and_authorize_key_after_12_minutes(
-    client,
-    authorized_client,
-    tokens_file,
+    client, authorized_client, mocker
 ):
-    response = authorized_client.post(
-        "/graphql",
-        json={"query": NEW_DEVICE_KEY_MUTATION},
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["success"] is True
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["message"] is not None
-    assert response.json()["data"]["api"]["getNewDeviceApiKey"]["code"] == 200
-    assert (
-        response.json()["data"]["api"]["getNewDeviceApiKey"]["key"].split(" ").__len__()
-        == 12
-    )
-    key = (
-        Mnemonic(language="english")
-        .to_entropy(response.json()["data"]["api"]["getNewDeviceApiKey"]["key"])
-        .hex()
-    )
-    assert read_json(tokens_file)["new_device"]["token"] == key
+    mnemonic_key = graphql_get_new_device_key(authorized_client)
+    mock = mocker.patch(DEVICE_KEY_VALIDATION_DATETIME, NearFuture)

-    file_data = read_json(tokens_file)
-    file_data["new_device"]["expiration"] = str(
-        datetime.datetime.now() - datetime.timedelta(minutes=13)
-    )
-    write_json(tokens_file, file_data)
-
-    response = client.post(
-        "/graphql",
-        json={
-            "query": AUTHORIZE_WITH_NEW_DEVICE_KEY_MUTATION,
-            "variables": {
-                "input": {
-                    "key": key,
-                    "deviceName": "test_token",
-                }
-            },
-        },
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert (
-        response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["success"]
-        is False
-    )
-    assert (
-        response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["message"]
-        is not None
-    )
-    assert response.json()["data"]["api"]["authorizeWithNewDeviceApiKey"]["code"] == 404
+    response = graphql_try_auth_new_device(client, mnemonic_key, "new_device")
+    assert_errorcode(get_data(response)["api"]["authorizeWithNewDeviceApiKey"], 404)


 def test_graphql_authorize_without_token(
     client,
-    tokens_file,
 ):
     response = client.post(
         "/graphql",
@@ -567,5 +322,4 @@ def test_graphql_authorize_without_token(
             },
         },
     )
-    assert response.status_code == 200
-    assert response.json().get("data") is None
+    assert_empty(response)
diff --git a/tests/test_graphql/test_api_recovery.py b/tests/test_graphql/test_api_recovery.py
index 87df666..ea44640 100644
--- a/tests/test_graphql/test_api_recovery.py
+++ b/tests/test_graphql/test_api_recovery.py
@@ -1,24 +1,33 @@
 # pylint: disable=redefined-outer-name
 # pylint: disable=unused-argument
 # pylint: disable=missing-function-docstring
-import datetime
-from tests.common import generate_api_query, mnemonic_to_hex, read_json, write_json
+import pytest

-TOKENS_FILE_CONTETS = {
-    "tokens": [
-        {
-            "token": "TEST_TOKEN",
-            "name": "test_token",
-            "date": "2022-01-14 08:31:10.789314",
-        },
-        {
-            "token": "TEST_TOKEN2",
-            "name": "test_token2",
-            "date": "2022-01-14 08:31:10.789314",
-        },
-    ]
-}
+from datetime import datetime, timezone
+
+from tests.common import (
+    generate_api_query,
+    assert_recovery_recent,
+    NearFuture,
+    RECOVERY_KEY_VALIDATION_DATETIME,
+)
+
+# GraphQL API's output should be timezone-naive
+from tests.common import ten_minutes_into_future_naive_utc as ten_minutes_into_future
+from tests.common import ten_minutes_into_future as ten_minutes_into_future_tz
+from tests.common import ten_minutes_into_past_naive_utc as ten_minutes_into_past
+
+from tests.test_graphql.common import (
+    assert_empty,
+    get_data,
+    assert_ok,
+    assert_errorcode,
+    assert_token_valid,
+    assert_original,
+    graphql_get_devices,
+    set_client_token,
+)

 API_RECOVERY_QUERY = """
 recoveryKey {
@@ -31,28 +40,89 @@ recoveryKey {
 """


-def test_graphql_recovery_key_status_unauthorized(client, tokens_file):
-    response = client.post(
+def request_recovery_status(client):
+    return client.post(
         "/graphql",
         json={"query": generate_api_query([API_RECOVERY_QUERY])},
     )
-    assert response.status_code == 200
-    assert response.json().get("data") is None

-def test_graphql_recovery_key_status_when_none_exists(authorized_client, tokens_file):
-    response = authorized_client.post(
+def graphql_recovery_status(client):
+    response = request_recovery_status(client)
+    data = get_data(response)
+
+    status = data["api"]["recoveryKey"]
+    assert status is not None
+    return status
+
+
+def request_make_new_recovery_key(client, expires_at=None, uses=None):
+    json = {"query": API_RECOVERY_KEY_GENERATE_MUTATION}
+    limits = {}
+
+    if expires_at is not None:
+        limits["expirationDate"] = expires_at.isoformat()
+    if uses is not None:
+        limits["uses"] = uses
+
+    if limits != {}:
+        json["variables"] = {"limits": limits}
+
+    response = client.post("/graphql", json=json)
+    return response
+
+
+def graphql_make_new_recovery_key(client, expires_at=None, uses=None):
+    response = request_make_new_recovery_key(client, expires_at, uses)
+    output = get_data(response)["api"]["getNewRecoveryApiKey"]
+    assert_ok(output)
+
+    key = output["key"]
+    assert key is not None
+    assert key.split(" ").__len__() == 18
+    return key
+
+
+def request_recovery_auth(client, key, device_name):
+    return client.post(
         "/graphql",
-        json={"query": generate_api_query([API_RECOVERY_QUERY])},
+        json={
+            "query": API_RECOVERY_KEY_USE_MUTATION,
+            "variables": {
+                "input": {
+                    "key": key,
+                    "deviceName": device_name,
+                },
+            },
+        },
     )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["recoveryKey"] is not None
-    assert response.json()["data"]["api"]["recoveryKey"]["exists"] is False
-    assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False
-    assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is None
-    assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None
-    assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None
+
+
+def graphql_use_recovery_key(client, key, device_name):
+    response = request_recovery_auth(client, key, device_name)
+    output = get_data(response)["api"]["useRecoveryApiKey"]
+    assert_ok(output)
+
+    token = output["token"]
+    assert token is not None
+    assert_token_valid(client, token)
+    set_client_token(client, token)
+    assert device_name in [device["name"] for device in graphql_get_devices(client)]
+    return token
+
+
+def test_graphql_recovery_key_status_unauthorized(client):
+    response = request_recovery_status(client)
+    assert_empty(response)
+
+
+def test_graphql_recovery_key_status_when_none_exists(authorized_client):
+    status = graphql_recovery_status(authorized_client)
+    assert status["exists"] is False
+    assert status["valid"] is False
+    assert status["creationDate"] is None
+    assert status["expirationDate"] is None
+    assert status["usesLeft"] is None


 API_RECOVERY_KEY_GENERATE_MUTATION = """
@@ -82,287 +152,86 @@ mutation TestUseRecoveryKey($input: UseRecoveryKeyInput!) {
 """


-def test_graphql_generate_recovery_key(client, authorized_client, tokens_file):
-    response = authorized_client.post(
-        "/graphql",
-        json={
-            "query": API_RECOVERY_KEY_GENERATE_MUTATION,
-        },
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is True
-    assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None
-    assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 200
-    assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is not None
-    assert (
-        response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"]
-        .split(" ")
-        .__len__()
-        == 18
-    )
-    assert read_json(tokens_file)["recovery_token"] is not None
-    time_generated = read_json(tokens_file)["recovery_token"]["date"]
-    assert time_generated is not None
-    key = response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"]
-    assert (
-        datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f")
-        - datetime.timedelta(seconds=5)
-        < datetime.datetime.now()
-    )
+def test_graphql_generate_recovery_key(client, authorized_client):
+    key = graphql_make_new_recovery_key(authorized_client)

-    # Try to get token status
-    response = authorized_client.post(
-        "/graphql",
-        json={"query": generate_api_query([API_RECOVERY_QUERY])},
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["recoveryKey"] is not None
-    assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True
-    assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True
-    assert response.json()["data"]["api"]["recoveryKey"][
-        "creationDate"
-    ] == time_generated.replace("Z", "")
-    assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None
-    assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None
+    status = graphql_recovery_status(authorized_client)
+    assert status["exists"] is True
+    assert status["valid"] is True
+    assert_recovery_recent(status["creationDate"])
+    assert status["expirationDate"] is None
+    assert status["usesLeft"] is None

-    # Try to use token
-    response = client.post(
-        "/graphql",
-        json={
-            "query": API_RECOVERY_KEY_USE_MUTATION,
-            "variables": {
-                "input": {
-                    "key": key,
-                    "deviceName": "new_test_token",
-                },
-            },
-        },
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None
-    assert (
-        response.json()["data"]["api"]["useRecoveryApiKey"]["token"]
-        == read_json(tokens_file)["tokens"][2]["token"]
-    )
-    assert read_json(tokens_file)["tokens"][2]["name"] == "new_test_token"
-
-    # Try to use token again
-    response = client.post(
-        "/graphql",
-        json={
-            "query": API_RECOVERY_KEY_USE_MUTATION,
-            "variables": {
-                "input": {
-                    "key": key,
-                    "deviceName": "new_test_token2",
-                },
-            },
-        },
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None
-    assert (
-        response.json()["data"]["api"]["useRecoveryApiKey"]["token"]
-        == read_json(tokens_file)["tokens"][3]["token"]
-    )
-    assert read_json(tokens_file)["tokens"][3]["name"] == "new_test_token2"
+    graphql_use_recovery_key(client, key, "new_test_token")
+    # And again
+    graphql_use_recovery_key(client, key, "new_test_token2")


+@pytest.mark.parametrize(
+    "expiration_date", [ten_minutes_into_future(), ten_minutes_into_future_tz()]
+)
 def test_graphql_generate_recovery_key_with_expiration_date(
-    client, authorized_client, tokens_file
+    client, authorized_client, expiration_date: datetime
 ):
-    expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5)
-    expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f")
-    response = authorized_client.post(
-        "/graphql",
-        json={
-            "query": API_RECOVERY_KEY_GENERATE_MUTATION,
-            "variables": {
-                "limits": {
-                    "expirationDate": expiration_date_str,
-                },
-            },
-        },
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is True
-    assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None
-    assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 200
-    assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is not None
-    assert (
-        response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"]
-        .split(" ")
-        .__len__()
-        == 18
-    )
-    assert read_json(tokens_file)["recovery_token"] is not None
+    key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date)

-    key = response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"]
-    assert read_json(tokens_file)["recovery_token"]["expiration"] == expiration_date_str
-    assert read_json(tokens_file)["recovery_token"]["token"] == mnemonic_to_hex(key)
+    status = graphql_recovery_status(authorized_client)
+    assert status["exists"] is True
+    assert status["valid"] is True
+    assert_recovery_recent(status["creationDate"])

-    time_generated = read_json(tokens_file)["recovery_token"]["date"]
-    assert time_generated is not None
-    assert (
-        datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f")
-        - datetime.timedelta(seconds=5)
-        < datetime.datetime.now()
+    # timezone-aware comparison. Should pass regardless of server's tz
+    assert datetime.fromisoformat(status["expirationDate"]) == expiration_date.replace(
+        tzinfo=timezone.utc
     )

-    # Try to get token status
-    response = authorized_client.post(
-        "/graphql",
-        json={"query": generate_api_query([API_RECOVERY_QUERY])},
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["recoveryKey"] is not None
-    assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True
-    assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True
-    assert response.json()["data"]["api"]["recoveryKey"][
-        "creationDate"
-    ] == time_generated.replace("Z", "")
-    assert (
-        response.json()["data"]["api"]["recoveryKey"]["expirationDate"]
-        == expiration_date_str
-    )
-    assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None
+    assert status["usesLeft"] is None

-    # Try to use token
-    response = authorized_client.post(
-        "/graphql",
-        json={
-            "query": API_RECOVERY_KEY_USE_MUTATION,
-            "variables": {
-                "input": {
-                    "key": key,
-                    "deviceName": "new_test_token",
-                },
-            },
-        },
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None
-    assert (
-        response.json()["data"]["api"]["useRecoveryApiKey"]["token"]
-        == read_json(tokens_file)["tokens"][2]["token"]
-    )
-
-    # Try to use token again
-    response = authorized_client.post(
-        "/graphql",
-        json={
-            "query": API_RECOVERY_KEY_USE_MUTATION,
-            "variables": {
-                "input": {
-                    "key": key,
-                    "deviceName": "new_test_token2",
-                },
-            },
-        },
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None
-    assert (
-        response.json()["data"]["api"]["useRecoveryApiKey"]["token"]
-        == read_json(tokens_file)["tokens"][3]["token"]
-    )
-
-    # Try to use token after expiration date
-    new_data = read_json(tokens_file)
-    new_data["recovery_token"]["expiration"] = (
-        datetime.datetime.now() - datetime.timedelta(minutes=5)
-    ).strftime("%Y-%m-%dT%H:%M:%S.%f")
-    write_json(tokens_file, new_data)
-    response = authorized_client.post(
-        "/graphql",
-        json={
-            "query": API_RECOVERY_KEY_USE_MUTATION,
-            "variables": {
-                "input": {
-                    "key": key,
-                    "deviceName": "new_test_token3",
-                },
-            },
-        },
-    )
-    assert response.status_code == 200
-    assert response.json().get("data") is not None
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is False
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 404
-    assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is None
-
-    assert read_json(tokens_file)["tokens"] == new_data["tokens"]
-
-    # Try to get token status
-    response = authorized_client.post(
-        "/graphql",
-        json={"query": generate_api_query([API_RECOVERY_QUERY])},
-    )
-    assert response.status_code == 200
-
assert response.json().get("data") is not None - assert response.json()["data"]["api"]["recoveryKey"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False - assert ( - response.json()["data"]["api"]["recoveryKey"]["creationDate"] == time_generated - ) - assert ( - response.json()["data"]["api"]["recoveryKey"]["expirationDate"] - == new_data["recovery_token"]["expiration"] - ) - assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] is None + graphql_use_recovery_key(client, key, "new_test_token") + # And again + graphql_use_recovery_key(client, key, "new_test_token2") -def test_graphql_generate_recovery_key_with_expiration_in_the_past( - authorized_client, tokens_file -): - expiration_date = datetime.datetime.now() - datetime.timedelta(minutes=5) - expiration_date_str = expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f") +def test_graphql_use_recovery_key_after_expiration(client, authorized_client, mocker): + expiration_date = ten_minutes_into_future() + key = graphql_make_new_recovery_key(authorized_client, expires_at=expiration_date) - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_GENERATE_MUTATION, - "variables": { - "limits": { - "expirationDate": expiration_date_str, - }, - }, - }, + # Timewarp to after it expires + mock = mocker.patch(RECOVERY_KEY_VALIDATION_DATETIME, NearFuture) + + response = request_recovery_auth(client, key, "new_test_token3") + output = get_data(response)["api"]["useRecoveryApiKey"] + assert_errorcode(output, 404) + + assert output["token"] is None + assert_original(authorized_client) + + status = graphql_recovery_status(authorized_client) + assert status["exists"] is True + assert status["valid"] is False + assert_recovery_recent(status["creationDate"]) + + # timezone-aware comparison. 
Should pass regardless of server's tz + assert datetime.fromisoformat(status["expirationDate"]) == expiration_date.replace( + tzinfo=timezone.utc ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is False - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None - assert "recovery_token" not in read_json(tokens_file) + assert status["usesLeft"] is None -def test_graphql_generate_recovery_key_with_invalid_time_format( - authorized_client, tokens_file -): +def test_graphql_generate_recovery_key_with_expiration_in_the_past(authorized_client): + expiration_date = ten_minutes_into_past() + response = request_make_new_recovery_key( + authorized_client, expires_at=expiration_date + ) + + output = get_data(response)["api"]["getNewRecoveryApiKey"] + assert_errorcode(output, 400) + + assert output["key"] is None + assert graphql_recovery_status(authorized_client)["exists"] is False + + +def test_graphql_generate_recovery_key_with_invalid_time_format(authorized_client): expiration_date = "invalid_time_format" expiration_date_str = expiration_date @@ -377,183 +246,56 @@ def test_graphql_generate_recovery_key_with_invalid_time_format( }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None - - assert "recovery_token" not in read_json(tokens_file) + assert_empty(response) + assert graphql_recovery_status(authorized_client)["exists"] is False -def test_graphql_generate_recovery_key_with_limited_uses( - authorized_client, tokens_file -): +def test_graphql_generate_recovery_key_with_limited_uses(authorized_client, client): + mnemonic_key = graphql_make_new_recovery_key(authorized_client, uses=2) - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_GENERATE_MUTATION, - "variables": { - "limits": { - "expirationDate": None, - "uses": 2, - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is True - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is not None + status = graphql_recovery_status(authorized_client) + assert status["exists"] is True + assert status["valid"] is True + assert status["creationDate"] is not None + assert status["expirationDate"] is None + assert status["usesLeft"] == 2 - mnemonic_key = response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] - key = mnemonic_to_hex(mnemonic_key) + graphql_use_recovery_key(client, mnemonic_key, "new_test_token1") - assert read_json(tokens_file)["recovery_token"]["token"] == key - assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 + status = graphql_recovery_status(authorized_client) + assert status["exists"] is True + assert status["valid"] is True + assert status["creationDate"] is not None + assert status["expirationDate"] is None + assert status["usesLeft"] == 1 - # Try to get token status - response = authorized_client.post( - "/graphql", - json={"query": generate_api_query([API_RECOVERY_QUERY])}, - ) - assert response.status_code == 200 - assert 
response.json().get("data") is not None - assert response.json()["data"]["api"]["recoveryKey"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 2 + graphql_use_recovery_key(client, mnemonic_key, "new_test_token2") - # Try to use token - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_USE_MUTATION, - "variables": { - "input": { - "key": mnemonic_key, - "deviceName": "test_token1", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None + status = graphql_recovery_status(authorized_client) + assert status["exists"] is True + assert status["valid"] is False + assert status["creationDate"] is not None + assert status["expirationDate"] is None + assert status["usesLeft"] == 0 - # Try to get token status - response = authorized_client.post( - "/graphql", - json={"query": generate_api_query([API_RECOVERY_QUERY])}, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["recoveryKey"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json()["data"]["api"]["recoveryKey"]["valid"] is True - assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 1 - - # Try to use token - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_USE_MUTATION, - "variables": { - "input": { - "key": mnemonic_key, - "deviceName": "test_token2", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is True - assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 200 - assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is not None - - # Try to get token status - response = authorized_client.post( - "/graphql", - json={"query": generate_api_query([API_RECOVERY_QUERY])}, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["recoveryKey"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["exists"] is True - assert response.json()["data"]["api"]["recoveryKey"]["valid"] is False - assert response.json()["data"]["api"]["recoveryKey"]["creationDate"] is not None - assert response.json()["data"]["api"]["recoveryKey"]["expirationDate"] is None - assert response.json()["data"]["api"]["recoveryKey"]["usesLeft"] == 0 - - # Try to use token - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_USE_MUTATION, - 
"variables": { - "input": { - "key": mnemonic_key, - "deviceName": "test_token3", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["useRecoveryApiKey"]["success"] is False - assert response.json()["data"]["api"]["useRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["api"]["useRecoveryApiKey"]["code"] == 404 - assert response.json()["data"]["api"]["useRecoveryApiKey"]["token"] is None + response = request_recovery_auth(client, mnemonic_key, "new_test_token3") + output = get_data(response)["api"]["useRecoveryApiKey"] + assert_errorcode(output, 404) -def test_graphql_generate_recovery_key_with_negative_uses( - authorized_client, tokens_file -): - # Try to get token status - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_GENERATE_MUTATION, - "variables": { - "limits": { - "uses": -1, - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is False - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None +def test_graphql_generate_recovery_key_with_negative_uses(authorized_client): + response = request_make_new_recovery_key(authorized_client, uses=-1) + + output = get_data(response)["api"]["getNewRecoveryApiKey"] + assert_errorcode(output, 400) + assert output["key"] is None + assert graphql_recovery_status(authorized_client)["exists"] is False -def test_graphql_generate_recovery_key_with_zero_uses(authorized_client, tokens_file): - # Try to get token status - response = authorized_client.post( - "/graphql", - json={ - "query": API_RECOVERY_KEY_GENERATE_MUTATION, - "variables": { - "limits": { - "uses": 0, - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["success"] is False - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["message"] is not None - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["code"] == 400 - assert response.json()["data"]["api"]["getNewRecoveryApiKey"]["key"] is None +def test_graphql_generate_recovery_key_with_zero_uses(authorized_client): + response = request_make_new_recovery_key(authorized_client, uses=0) + + output = get_data(response)["api"]["getNewRecoveryApiKey"] + assert_errorcode(output, 400) + assert output["key"] is None + assert graphql_recovery_status(authorized_client)["exists"] is False diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository.py b/tests/test_graphql/test_repository/test_json_tokens_repository.py deleted file mode 100644 index af8c844..0000000 --- a/tests/test_graphql/test_repository/test_json_tokens_repository.py +++ /dev/null @@ -1,218 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=missing-function-docstring -""" -tests that restrict json token repository implementation -""" - -import pytest - - -from datetime import datetime - -from selfprivacy_api.models.tokens.token import Token -from selfprivacy_api.repositories.tokens.exceptions import ( - TokenNotFound, - RecoveryKeyNotFound, - NewDeviceKeyNotFound, -) -from 
selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) - -from tests.common import read_json -from test_tokens_repository import ( - mock_recovery_key_generate, - mock_generate_token, - mock_new_device_key_generate, - empty_keys, -) - -ORIGINAL_TOKEN_CONTENT = [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698", - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z", - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z", - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698", - }, -] - - -@pytest.fixture -def tokens(mocker, datadir): - mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json") - assert read_json(datadir / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT - return datadir - - -@pytest.fixture -def null_keys(mocker, datadir): - mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json") - assert read_json(datadir / "null_keys.json")["recovery_token"] is None - assert read_json(datadir / "null_keys.json")["new_device"] is None - return datadir - - -def test_delete_token(tokens): - repo = JsonTokensRepository() - input_token = Token( - token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - device_name="primary_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) - - repo.delete_token(input_token) - assert read_json(tokens / "tokens.json")["tokens"] == [ - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z", - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z", - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698", - }, - ] - - -def test_delete_not_found_token(tokens): - repo = JsonTokensRepository() - input_token = Token( - token="imbadtoken", - device_name="primary_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - ) - with pytest.raises(TokenNotFound): - assert repo.delete_token(input_token) is None - - assert read_json(tokens / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT - - -def test_create_recovery_key(tokens, mock_recovery_key_generate): - repo = JsonTokensRepository() - - assert repo.create_recovery_key(uses_left=1, expiration=None) is not None - assert read_json(tokens / "tokens.json")["recovery_token"] == { - "token": "889bf49c1d3199d71a2e704718772bd53a422020334db051", - "date": "2022-07-15T17:41:31.675698", - "expiration": None, - "uses_left": 1, - } - - -def test_use_mnemonic_recovery_key_when_null(null_keys): - repo = JsonTokensRepository() - - with pytest.raises(RecoveryKeyNotFound): - assert ( - repo.use_mnemonic_recovery_key( - mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", - device_name="primary_token", - ) - is None - ) - - -def test_use_mnemonic_recovery_key(tokens, mock_generate_token): - repo = JsonTokensRepository() - - assert repo.use_mnemonic_recovery_key( - mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park", - device_name="newdevice", - ) == Token( - 
token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", - device_name="newdevice", - created_at=datetime(2022, 11, 14, 6, 6, 32, 777123), - ) - - assert read_json(tokens / "tokens.json")["tokens"] == [ - { - "date": "2022-07-15 17:41:31.675698", - "name": "primary_token", - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z", - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z", - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698", - }, - { - "date": "2022-11-14T06:06:32.777123", - "name": "newdevice", - "token": "ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", - }, - ] - assert read_json(tokens / "tokens.json")["recovery_token"] == { - "date": "2022-11-11T11:48:54.228038", - "expiration": None, - "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", - "uses_left": 1, - } - - -def test_get_new_device_key(tokens, mock_new_device_key_generate): - repo = JsonTokensRepository() - - assert repo.get_new_device_key() is not None - assert read_json(tokens / "tokens.json")["new_device"] == { - "date": "2022-07-15T17:41:31.675698", - "expiration": "2022-07-15T17:41:31.675698", - "token": "43478d05b35e4781598acd76e33832bb", - } - - -def test_delete_new_device_key(tokens): - repo = JsonTokensRepository() - - assert repo.delete_new_device_key() is None - assert "new_device" not in read_json(tokens / "tokens.json") - - -def test_delete_new_device_key_when_empty(empty_keys): - repo = JsonTokensRepository() - - repo.delete_new_device_key() - assert "new_device" not in read_json(empty_keys / "empty_keys.json") - - -def test_use_mnemonic_new_device_key_when_null(null_keys): - repo = JsonTokensRepository() - - with pytest.raises(NewDeviceKeyNotFound): - assert ( - repo.use_mnemonic_new_device_key( - device_name="imnew", - mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", - ) - is None - ) diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository/empty_keys.json b/tests/test_graphql/test_repository/test_json_tokens_repository/empty_keys.json deleted file mode 100644 index 2131ddf..0000000 --- a/tests/test_graphql/test_repository/test_json_tokens_repository/empty_keys.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - } - ] -} diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository/null_keys.json b/tests/test_graphql/test_repository/test_json_tokens_repository/null_keys.json deleted file mode 100644 index 45e6f90..0000000 --- a/tests/test_graphql/test_repository/test_json_tokens_repository/null_keys.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z" - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z" - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698" - } - ], - "recovery_token": null, - 
"new_device": null -} diff --git a/tests/test_graphql/test_repository/test_json_tokens_repository/tokens.json b/tests/test_graphql/test_repository/test_json_tokens_repository/tokens.json deleted file mode 100644 index bb1805c..0000000 --- a/tests/test_graphql/test_repository/test_json_tokens_repository/tokens.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z" - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z" - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698" - } - ], - "recovery_token": { - "token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", - "date": "2022-11-11T11:48:54.228038", - "expiration": null, - "uses_left": 2 - }, - "new_device": { - "token": "2237238de23dc71ab558e317bdb8ff8e", - "date": "2022-10-26 20:50:47.973212", - "expiration": "2022-10-26 21:00:47.974153" - } -} diff --git a/tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json b/tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json deleted file mode 100644 index 2131ddf..0000000 --- a/tests/test_graphql/test_repository/test_tokens_repository/empty_keys.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - } - ] -} diff --git a/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json b/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json deleted file mode 100644 index 45e6f90..0000000 --- a/tests/test_graphql/test_repository/test_tokens_repository/null_keys.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z" - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z" - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698" - } - ], - "recovery_token": null, - "new_device": null -} diff --git a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json b/tests/test_graphql/test_repository/test_tokens_repository/tokens.json deleted file mode 100644 index bb1805c..0000000 --- a/tests/test_graphql/test_repository/test_tokens_repository/tokens.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "tokens": [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698" - }, - { - "token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", - "name": "second_token", - "date": "2022-07-15 17:41:31.675698Z" - }, - { - "token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8", - "name": "third_token", - "date": "2022-07-15T17:41:31.675698Z" - }, - { - "token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM", - "name": "forth_token", - "date": "2022-07-15T17:41:31.675698" - } - ], - "recovery_token": { - "token": 
"ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54", - "date": "2022-11-11T11:48:54.228038", - "expiration": null, - "uses_left": 2 - }, - "new_device": { - "token": "2237238de23dc71ab558e317bdb8ff8e", - "date": "2022-10-26 20:50:47.973212", - "expiration": "2022-10-26 21:00:47.974153" - } -} diff --git a/tests/test_graphql/test_services.py b/tests/test_graphql/test_services.py new file mode 100644 index 0000000..3983b56 --- /dev/null +++ b/tests/test_graphql/test_services.py @@ -0,0 +1,517 @@ +import pytest +from typing import Generator + +from selfprivacy_api.utils.block_devices import BlockDevices + +import selfprivacy_api.services as service_module +from selfprivacy_api.services import get_service_by_id +from selfprivacy_api.services.service import Service, ServiceStatus +from selfprivacy_api.services.test_service import DummyService + +from tests.common import generate_service_query +from tests.test_graphql.common import assert_empty, assert_ok, get_data + + +@pytest.fixture() +def only_dummy_service(dummy_service) -> Generator[DummyService, None, None]: + # because queries to services that are not really there error out + back_copy = service_module.services.copy() + service_module.services.clear() + service_module.services.append(dummy_service) + yield dummy_service + service_module.services.clear() + service_module.services.extend(back_copy) + + +API_START_MUTATION = """ +mutation TestStartService($service_id: String!) { + services { + startService(serviceId: $service_id) { + success + message + code + service { + id + status + } + } + } +} +""" + +API_RESTART_MUTATION = """ +mutation TestRestartService($service_id: String!) { + services { + restartService(serviceId: $service_id) { + success + message + code + service { + id + status + } + } + } +} +""" + +API_ENABLE_MUTATION = """ +mutation TestStartService($service_id: String!) { + services { + enableService(serviceId: $service_id) { + success + message + code + service { + id + isEnabled + } + } + } +} +""" +API_DISABLE_MUTATION = """ +mutation TestStartService($service_id: String!) { + services { + disableService(serviceId: $service_id) { + success + message + code + service { + id + isEnabled + } + } + } +} +""" + +API_STOP_MUTATION = """ +mutation TestStopService($service_id: String!) { + services { + stopService(serviceId: $service_id) { + success + message + code + service { + id + status + } + } + } +} + +""" +API_SERVICES_QUERY = """ +allServices { + id + status + isEnabled +} +""" + +API_MOVE_MUTATION = """ +mutation TestMoveService($input: MoveServiceInput!) 
{ + services { + moveService(input: $input) { + success + message + code + job { + uid + status + } + service { + id + status + } + } + } +} +""" + + +def assert_notfound(data): + assert_errorcode(data, 404) + + +def assert_errorcode(data, errorcode): + assert data["code"] == errorcode + assert data["success"] is False + assert data["message"] is not None + + +def api_enable(client, service: Service): + return api_enable_by_name(client, service.get_id()) + + +def api_enable_by_name(client, service_id: str): + response = client.post( + "/graphql", + json={ + "query": API_ENABLE_MUTATION, + "variables": {"service_id": service_id}, + }, + ) + return response + + +def api_disable(client, service: Service): + return api_disable_by_name(client, service.get_id()) + + +def api_disable_by_name(client, service_id: str): + response = client.post( + "/graphql", + json={ + "query": API_DISABLE_MUTATION, + "variables": {"service_id": service_id}, + }, + ) + return response + + +def api_start(client, service: Service): + return api_start_by_name(client, service.get_id()) + + +def api_start_by_name(client, service_id: str): + response = client.post( + "/graphql", + json={ + "query": API_START_MUTATION, + "variables": {"service_id": service_id}, + }, + ) + return response + + +def api_move(client, service: Service, location: str): + return api_move_by_name(client, service.get_id(), location) + + +def api_move_by_name(client, service_id: str, location: str): + response = client.post( + "/graphql", + json={ + "query": API_MOVE_MUTATION, + "variables": { + "input": { + "serviceId": service_id, + "location": location, + } + }, + }, + ) + return response + + +def api_restart(client, service: Service): + return api_restart_by_name(client, service.get_id()) + + +def api_restart_by_name(client, service_id: str): + response = client.post( + "/graphql", + json={ + "query": API_RESTART_MUTATION, + "variables": {"service_id": service_id}, + }, + ) + return response + + +def api_stop(client, service: Service): + return api_stop_by_name(client, service.get_id()) + + +def api_stop_by_name(client, service_id: str): + response = client.post( + "/graphql", + json={ + "query": API_STOP_MUTATION, + "variables": {"service_id": service_id}, + }, + ) + return response + + +def api_all_services(authorized_client): + response = api_all_services_raw(authorized_client) + data = get_data(response) + result = data["services"]["allServices"] + assert result is not None + return result + + +def api_all_services_raw(client): + return client.post( + "/graphql", + json={"query": generate_service_query([API_SERVICES_QUERY])}, + ) + + +def api_service(authorized_client, service: Service): + service_id = service.get_id() + for _service in api_all_services(authorized_client): + if _service["id"] == service_id: + return _service + + +def test_get_services(authorized_client, only_dummy_service): + services = api_all_services(authorized_client) + assert len(services) == 1 + + api_dummy_service = services[0] + assert api_dummy_service["id"] == "testservice" + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + assert api_dummy_service["isEnabled"] is True + + +def test_enable_return_value(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_enable(authorized_client, dummy_service) + data = get_data(mutation_response)["services"]["enableService"] + assert_ok(data) + service = data["service"] + assert 
service["id"] == dummy_service.get_id() + assert service["isEnabled"] == True + + +def test_disable_return_value(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_disable(authorized_client, dummy_service) + data = get_data(mutation_response)["services"]["disableService"] + assert_ok(data) + service = data["service"] + assert service["id"] == dummy_service.get_id() + assert service["isEnabled"] == False + + +def test_start_return_value(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_start(authorized_client, dummy_service) + data = get_data(mutation_response)["services"]["startService"] + assert_ok(data) + service = data["service"] + assert service["id"] == dummy_service.get_id() + assert service["status"] == ServiceStatus.ACTIVE.value + + +def test_restart(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + dummy_service.set_delay(0.3) + mutation_response = api_restart(authorized_client, dummy_service) + data = get_data(mutation_response)["services"]["restartService"] + assert_ok(data) + service = data["service"] + assert service["id"] == dummy_service.get_id() + assert service["status"] == ServiceStatus.RELOADING.value + + +def test_stop_return_value(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_stop(authorized_client, dummy_service) + data = get_data(mutation_response)["services"]["stopService"] + assert_ok(data) + service = data["service"] + assert service["id"] == dummy_service.get_id() + assert service["status"] == ServiceStatus.INACTIVE.value + + +def test_allservices_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + response = api_all_services_raw(client) + + assert response.status_code == 200 + assert response.json().get("data") is None + + +def test_start_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + response = api_start(client, dummy_service) + assert_empty(response) + + +def test_restart_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + response = api_restart(client, dummy_service) + assert_empty(response) + + +def test_stop_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + response = api_stop(client, dummy_service) + assert_empty(response) + + +def test_enable_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + response = api_enable(client, dummy_service) + assert_empty(response) + + +def test_disable_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + response = api_disable(client, dummy_service) + assert_empty(response) + + +def test_move_unauthorized(client, only_dummy_service): + dummy_service = only_dummy_service + response = api_move(client, dummy_service, "sda1") + assert_empty(response) + + +def test_start_nonexistent(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_start_by_name(authorized_client, "bogus_service") + data = get_data(mutation_response)["services"]["startService"] + assert_notfound(data) + + assert data["service"] is None + + +def test_restart_nonexistent(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_restart_by_name(authorized_client, "bogus_service") + data = get_data(mutation_response)["services"]["restartService"] + assert_notfound(data) + + assert data["service"] is None + 
+ +def test_stop_nonexistent(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_stop_by_name(authorized_client, "bogus_service") + data = get_data(mutation_response)["services"]["stopService"] + assert_notfound(data) + + assert data["service"] is None + + +def test_enable_nonexistent(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_enable_by_name(authorized_client, "bogus_service") + data = get_data(mutation_response)["services"]["enableService"] + assert_notfound(data) + + assert data["service"] is None + + +def test_disable_nonexistent(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_disable_by_name(authorized_client, "bogus_service") + data = get_data(mutation_response)["services"]["disableService"] + assert_notfound(data) + + assert data["service"] is None + + +def test_stop_start(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + # attempting to start an already started service + api_start(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + api_stop(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["status"] == ServiceStatus.INACTIVE.value + + # attempting to stop an already stopped service + api_stop(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["status"] == ServiceStatus.INACTIVE.value + + api_start(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + +def test_disable_enable(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["isEnabled"] is True + + # attempting to enable an already enabled service + api_enable(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["isEnabled"] is True + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + api_disable(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["isEnabled"] is False + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + # attempting to disable an already disabled service + api_disable(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["isEnabled"] is False + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + api_enable(authorized_client, dummy_service) + api_dummy_service = api_all_services(authorized_client)[0] + assert api_dummy_service["isEnabled"] is True + assert api_dummy_service["status"] == ServiceStatus.ACTIVE.value + + +def test_move_immovable(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + dummy_service.set_movable(False) + mutation_response = api_move(authorized_client, dummy_service, "sda1") + data = get_data(mutation_response)["services"]["moveService"] + assert_errorcode(data, 400) + + # is there any meaning in returning the 
service in this case? + assert data["service"] is not None + assert data["job"] is None + + +def test_move_no_such_volume(authorized_client, only_dummy_service): + dummy_service = only_dummy_service + mutation_response = api_move(authorized_client, dummy_service, "bogus_volume") + data = get_data(mutation_response)["services"]["moveService"] + assert_notfound(data) + + # is there any meaning in returning the service in this case? + assert data["service"] is not None + assert data["job"] is None + + +def test_move_same_volume(authorized_client, dummy_service): + # dummy_service = only_dummy_service + + # we need a drive that actually exists + root_volume = BlockDevices().get_root_block_device() + dummy_service.set_simulated_moves(False) + dummy_service.set_drive(root_volume.name) + + mutation_response = api_move(authorized_client, dummy_service, root_volume.name) + data = get_data(mutation_response)["services"]["moveService"] + assert_errorcode(data, 400) + + # is there any meaning in returning the service in this case? + assert data["service"] is not None + assert data["job"] is not None + + +def test_mailservice_cannot_enable_disable(authorized_client): + mailservice = get_service_by_id("simple-nixos-mailserver") + + mutation_response = api_enable(authorized_client, mailservice) + data = get_data(mutation_response)["services"]["enableService"] + assert_errorcode(data, 400) + # TODO?: we cannot convert mailservice to graphql Service without /var/domain yet + # assert data["service"] is not None + + mutation_response = api_disable(authorized_client, mailservice) + data = get_data(mutation_response)["services"]["disableService"] + assert_errorcode(data, 400) + # assert data["service"] is not None diff --git a/tests/test_graphql/test_ssh.py b/tests/test_graphql/test_ssh.py index 5f888c8..945f105 100644 --- a/tests/test_graphql/test_ssh.py +++ b/tests/test_graphql/test_ssh.py @@ -1,8 +1,25 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument import pytest +from typing import Optional -from tests.common import read_json +from selfprivacy_api.graphql.mutations.system_mutations import SystemMutations +from selfprivacy_api.graphql.queries.system import System + +# only allowed in fixtures and utils +from selfprivacy_api.actions.ssh import remove_ssh_key, get_ssh_settings +from selfprivacy_api.actions.users import get_users, UserDataUserOrigin + +from tests.common import read_json, generate_system_query, generate_users_query +from tests.test_graphql.common import ( + assert_empty, + assert_ok, + get_data, + assert_errorcode, +) +from tests.test_graphql.test_users import API_USERS_INFO + +key_users = ["root", "tester", "user1", "user2", "user3"] class ProcessMock: @@ -12,7 +29,7 @@ class ProcessMock: self.args = args self.kwargs = kwargs - def communicate(): # pylint: disable=no-method-argument + def communicate(self): return (b"NEW_HASHED", None) returncode = 0 @@ -39,7 +56,56 @@ def some_users(mocker, datadir): return datadir -# TESTS ######################################################## +@pytest.fixture +def no_rootkeys(generic_userdata): + for rootkey in get_ssh_settings().rootKeys: + remove_ssh_key("root", rootkey) + assert get_ssh_settings().rootKeys == [] + + +@pytest.fixture +def no_keys(generic_userdata): + # this removes root and admin keys too + + users = get_users() + for user in users: + for key in user.ssh_keys: + remove_ssh_key(user.username, key) + users = get_users() + for user in users: + assert user.ssh_keys == [] + + +@pytest.fixture 
+def no_admin_key(generic_userdata, authorized_client): + admin_keys = api_get_user_keys(authorized_client, admin_name()) + + for admin_key in admin_keys: + remove_ssh_key(admin_name(), admin_key) + + assert api_get_user_keys(authorized_client, admin_name()) == [] + + +def admin_name() -> Optional[str]: + users = get_users() + for user in users: + if user.origin == UserDataUserOrigin.PRIMARY: + return user.username + return None + + +def api_get_user_keys(authorized_client, user: str): + response = authorized_client.post( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + data = get_data(response)["users"]["allUsers"] + for _user in data: + if _user["username"] == user: + return _user["sshKeys"] + return None API_CREATE_SSH_KEY_MUTATION = """ @@ -58,6 +124,248 @@ mutation addSshKey($sshInput: SshMutationInput!) { } """ +API_SET_SSH_SETTINGS = """ +mutation enableSsh($settings: SSHSettingsInput!) { + system { + changeSshSettings(settings: $settings) { + success + message + code + enable + passwordAuthentication + } + } +} + +""" + +API_SSH_SETTINGS_QUERY = """ +settings { + ssh { + enable + passwordAuthentication + } +} +""" + + +API_ROOTKEYS_QUERY = """ +settings { + ssh { + rootSshKeys + } +} +""" + + +def api_ssh_settings_raw(client): + return client.post( + "/graphql", + json={"query": generate_system_query([API_SSH_SETTINGS_QUERY])}, + ) + + +def api_rootkeys_raw(client): + return client.post( + "/graphql", + json={"query": generate_system_query([API_ROOTKEYS_QUERY])}, + ) + + +def api_add_ssh_key(authorized_client, user: str, key: str): + response = authorized_client.post( + "/graphql", + json={ + "query": API_CREATE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": user, + "sshKey": key, + }, + }, + }, + ) + data = get_data(response) + result = data["users"]["addSshKey"] + assert result is not None + return result + + +def api_remove_ssh_key(authorized_client, user: str, key: str): + response = authorized_client.post( + "/graphql", + json={ + "query": API_REMOVE_SSH_KEY_MUTATION, + "variables": { + "sshInput": { + "username": user, + "sshKey": key, + }, + }, + }, + ) + data = get_data(response) + result = data["users"]["removeSshKey"] + assert result is not None + return result + + +def api_rootkeys(authorized_client): + response = api_rootkeys_raw(authorized_client) + data = get_data(response) + result = data["system"]["settings"]["ssh"]["rootSshKeys"] + assert result is not None + return result + + +def api_ssh_settings(authorized_client): + response = api_ssh_settings_raw(authorized_client) + data = get_data(response) + result = data["system"]["settings"]["ssh"] + assert result is not None + return result + + +def api_set_ssh_settings_dict(authorized_client, dict): + response = authorized_client.post( + "/graphql", + json={ + "query": API_SET_SSH_SETTINGS, + "variables": { + "settings": dict, + }, + }, + ) + data = get_data(response) + result = data["system"]["changeSshSettings"] + assert result is not None + return result + + +def api_set_ssh_settings(authorized_client, enable: bool, password_auth: bool): + return api_set_ssh_settings_dict( + authorized_client, + { + "enable": enable, + "passwordAuthentication": password_auth, + }, + ) + + +# TESTS ######################################################## + + +def test_graphql_ssh_query(authorized_client, some_users): + settings = api_ssh_settings(authorized_client) + assert settings["enable"] is True + assert settings["passwordAuthentication"] is True + + +def 
test_graphql_get_ssh_settings_unauthorized(client, some_users): + response = api_ssh_settings_raw(client) + assert_empty(response) + + +def test_graphql_change_ssh_settings_unauthorized(client, some_users): + response = client.post( + "/graphql", + json={ + "query": API_SET_SSH_SETTINGS, + "variables": { + "settings": { + "enable": True, + "passwordAuthentication": True, + }, + }, + }, + ) + assert_empty(response) + + +def assert_includes(smaller_dict: dict, bigger_dict: dict): + for item in smaller_dict.items(): + assert item in bigger_dict.items() + + +available_settings = [ + {"enable": True, "passwordAuthentication": True}, + {"enable": True, "passwordAuthentication": False}, + {"enable": False, "passwordAuthentication": True}, + {"enable": False, "passwordAuthentication": False}, +] + + +original_settings = [ + {"enable": True, "passwordAuthentication": True}, + {"enable": True, "passwordAuthentication": False}, + {"enable": False, "passwordAuthentication": True}, + {"enable": False, "passwordAuthentication": False}, +] + + +@pytest.mark.parametrize("original_settings", original_settings) +@pytest.mark.parametrize("settings", available_settings) +def test_graphql_readwrite_ssh_settings( + authorized_client, some_users, settings, original_settings +): + # Userdata-related tests like undefined fields are in actions-level tests. + output = api_set_ssh_settings_dict(authorized_client, original_settings) + assert_includes(api_ssh_settings(authorized_client), output) + + output = api_set_ssh_settings_dict(authorized_client, settings) + assert_ok(output) + assert_includes(settings, output) + if "enable" not in settings: + assert output["enable"] == original_settings["enable"] + assert_includes(api_ssh_settings(authorized_client), output) + + +forbidden_settings = [ + # kept here so that if a future version makes these fields + # optional, the failing tests will remind us to extend them accordingly + {"enable": True}, + {"passwordAuthentication": True}, +] + + +@pytest.mark.parametrize("original_settings", original_settings) +@pytest.mark.parametrize("settings", forbidden_settings) +def test_graphql_readwrite_ssh_settings_partial( + authorized_client, some_users, settings, original_settings +): + output = api_set_ssh_settings_dict(authorized_client, original_settings) + with pytest.raises(Exception): + output = api_set_ssh_settings_dict(authorized_client, settings) + + +def test_graphql_disable_twice(authorized_client, some_users): + output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False) + assert_ok(output) + assert output["enable"] is False + assert output["passwordAuthentication"] is False + + output = api_set_ssh_settings(authorized_client, enable=False, password_auth=False) + assert_ok(output) + assert output["enable"] is False + assert output["passwordAuthentication"] is False + + +def test_graphql_enable_twice(authorized_client, some_users): + output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True) + assert_ok(output) + assert output["enable"] is True + assert output["passwordAuthentication"] is True + assert_includes(api_ssh_settings(authorized_client), output) + + output = api_set_ssh_settings(authorized_client, enable=True, password_auth=True) + assert_ok(output) + assert output["enable"] is True + assert output["passwordAuthentication"] is True + assert_includes(api_ssh_settings(authorized_client), output) + + +############## KEYS + def test_graphql_add_ssh_key_unauthorized(client, some_users, 
mock_subprocess_popen): response = client.post( @@ -72,110 +380,84 @@ }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) -def test_graphql_add_ssh_key(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "user1", - "sshKey": "ssh-rsa KEY test_key@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None +# Unauthorized getting of keys is tested in test_users.py because it is part of the users interface - assert response.json()["data"]["users"]["addSshKey"]["code"] == 201 - assert response.json()["data"]["users"]["addSshKey"]["message"] is not None - assert response.json()["data"]["users"]["addSshKey"]["success"] is True - assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "user1" - assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ - "ssh-rsa KEY user1@pc", +def test_graphql_get_root_key(authorized_client, some_users): + assert api_rootkeys(authorized_client) == ["ssh-ed25519 KEY test@pc"] + + +def test_graphql_get_root_key_when_none(authorized_client, no_rootkeys): + assert api_rootkeys(authorized_client) == [] + + +# Getting admin keys when they are present is tested in test_users.py + + +def test_get_admin_key_when_none(authorized_client, no_admin_key): + assert api_get_user_keys(authorized_client, admin_name()) == [] + + +@pytest.mark.parametrize("user", key_users) +def test_graphql_add_ssh_key_when_none(authorized_client, no_keys, user): + key1 = "ssh-rsa KEY test_key@pc" + if user == "root": + assert api_rootkeys(authorized_client) == [] + else: + assert api_get_user_keys(authorized_client, user) == [] + + output = api_add_ssh_key(authorized_client, user, key1) + + assert_ok(output, code=201) + + assert output["user"]["username"] == user + assert output["user"]["sshKeys"] == [key1] + + if user == "root": + assert api_rootkeys(authorized_client) == [key1] + else: + assert api_get_user_keys(authorized_client, user) == [key1] + + +@pytest.mark.parametrize("user", key_users) +def test_graphql_add_ssh_key_one_more(authorized_client, no_keys, user): + keys = [ "ssh-rsa KEY test_key@pc", + "ssh-rsa KEY2 test_key@pc", + ] + output = api_add_ssh_key(authorized_client, user, keys[0]) + assert output["user"]["sshKeys"] == [keys[0]] + + output = api_add_ssh_key(authorized_client, user, keys[1]) + + assert_ok(output, code=201) + + assert output["user"]["username"] == user + assert output["user"]["sshKeys"] == keys + + if user == "root": + assert api_rootkeys(authorized_client) == keys + else: + assert api_get_user_keys(authorized_client, user) == keys -def test_graphql_add_root_ssh_key(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "root", - "sshKey": "ssh-rsa KEY test_key@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None +@pytest.mark.parametrize("user", key_users) +def test_graphql_add_ssh_key_same(authorized_client, no_keys, user): + key = "ssh-rsa KEY test_key@pc" + output = api_add_ssh_key(authorized_client, user, key) + assert output["user"]["sshKeys"] == [key] - assert 
response.json()["data"]["users"]["addSshKey"]["code"] == 201 - assert response.json()["data"]["users"]["addSshKey"]["message"] is not None - assert response.json()["data"]["users"]["addSshKey"]["success"] is True - - assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "root" - assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ - "ssh-ed25519 KEY test@pc", - "ssh-rsa KEY test_key@pc", - ] + output = api_add_ssh_key(authorized_client, user, key) + assert_errorcode(output, 409) -def test_graphql_add_main_ssh_key(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "tester", - "sshKey": "ssh-rsa KEY test_key@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["addSshKey"]["code"] == 201 - assert response.json()["data"]["users"]["addSshKey"]["message"] is not None - assert response.json()["data"]["users"]["addSshKey"]["success"] is True - - assert response.json()["data"]["users"]["addSshKey"]["user"]["username"] == "tester" - assert response.json()["data"]["users"]["addSshKey"]["user"]["sshKeys"] == [ - "ssh-rsa KEY test@pc", - "ssh-rsa KEY test_key@pc", - ] - - -def test_graphql_add_bad_ssh_key(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "user1", - "sshKey": "trust me, this is the ssh key", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["addSshKey"]["code"] == 400 - assert response.json()["data"]["users"]["addSshKey"]["message"] is not None - assert response.json()["data"]["users"]["addSshKey"]["success"] is False +@pytest.mark.parametrize("user", key_users) +def test_graphql_add_bad_ssh_key(authorized_client, some_users, user): + output = api_add_ssh_key(authorized_client, user, "trust me, this is the ssh key") + assert_errorcode(output, 400) def test_graphql_add_ssh_key_nonexistent_user( @@ -231,133 +513,38 @@ def test_graphql_remove_ssh_key_unauthorized(client, some_users, mock_subprocess }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) -def test_graphql_remove_ssh_key(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_REMOVE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "user1", - "sshKey": "ssh-rsa KEY user1@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None +@pytest.mark.parametrize("user", key_users) +def test_graphql_remove_ssh_key(authorized_client, no_keys, user): + keys = [ + "ssh-rsa KEY test_key@pc", + "ssh-rsa KEY2 test_key@pc", + ] + output = api_add_ssh_key(authorized_client, user, keys[0]) + output = api_add_ssh_key(authorized_client, user, keys[1]) + assert output["user"]["sshKeys"] == keys - assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200 - assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["users"]["removeSshKey"]["success"] is True + output = api_remove_ssh_key(authorized_client, user, keys[1]) + assert_ok(output) + 
assert output["user"]["username"] == user + assert output["user"]["sshKeys"] == [keys[0]] - assert ( - response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "user1" - ) - assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] + if user == "root": + assert api_rootkeys(authorized_client) == [keys[0]] + else: + assert api_get_user_keys(authorized_client, user) == [keys[0]] -def test_graphql_remove_root_ssh_key( - authorized_client, some_users, mock_subprocess_popen -): - response = authorized_client.post( - "/graphql", - json={ - "query": API_REMOVE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "root", - "sshKey": "ssh-ed25519 KEY test@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200 - assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["users"]["removeSshKey"]["success"] is True - - assert ( - response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "root" - ) - assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] - - -def test_graphql_remove_main_ssh_key( - authorized_client, some_users, mock_subprocess_popen -): - response = authorized_client.post( - "/graphql", - json={ - "query": API_REMOVE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "tester", - "sshKey": "ssh-rsa KEY test@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["removeSshKey"]["code"] == 200 - assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["users"]["removeSshKey"]["success"] is True - - assert ( - response.json()["data"]["users"]["removeSshKey"]["user"]["username"] == "tester" - ) - assert response.json()["data"]["users"]["removeSshKey"]["user"]["sshKeys"] == [] - - -def test_graphql_remove_nonexistent_ssh_key( - authorized_client, some_users, mock_subprocess_popen -): - response = authorized_client.post( - "/graphql", - json={ - "query": API_REMOVE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "user1", - "sshKey": "ssh-rsa KEY test_key@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["removeSshKey"]["code"] == 404 - assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None - assert response.json()["data"]["users"]["removeSshKey"]["success"] is False +@pytest.mark.parametrize("user", key_users) +def test_graphql_remove_nonexistent_ssh_key(authorized_client, some_users, user): + output = api_remove_ssh_key(authorized_client, user, "ssh-rsa nonexistent") + assert_errorcode(output, 404) def test_graphql_remove_ssh_key_nonexistent_user( authorized_client, some_users, mock_subprocess_popen ): - response = authorized_client.post( - "/graphql", - json={ - "query": API_REMOVE_SSH_KEY_MUTATION, - "variables": { - "sshInput": { - "username": "user666", - "sshKey": "ssh-rsa KEY test_key@pc", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["removeSshKey"]["code"] == 404 - assert response.json()["data"]["users"]["removeSshKey"]["message"] is not None - assert 
response.json()["data"]["users"]["removeSshKey"]["success"] is False + output = api_remove_ssh_key(authorized_client, "user666", "ssh-rsa KEY test_key@pc") + assert_errorcode(output, 404) diff --git a/tests/test_graphql/test_ssh/some_users.json b/tests/test_graphql/test_ssh/some_users.json index c02d216..b81513d 100644 --- a/tests/test_graphql/test_ssh/some_users.json +++ b/tests/test_graphql/test_ssh/some_users.json @@ -1,43 +1,17 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": false + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, "timezone": "Europe/Moscow", + "username": "tester", + "useBinds": true, "sshKeys": [ "ssh-rsa KEY test@pc" ], @@ -60,17 +34,50 @@ "hashedPassword": "HASHED_PASSWORD_3" } ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "server": { - "provider": "HETZNER" + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] } } diff --git a/tests/test_graphql/test_system.py b/tests/test_graphql/test_system.py index 3de4816..36a1cc1 100644 --- a/tests/test_graphql/test_system.py +++ b/tests/test_graphql/test_system.py @@ -5,12 +5,8 @@ import os import pytest from tests.common import generate_system_query, read_json - - -@pytest.fixture -def domain_file(mocker, datadir): - mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", datadir / "domain") - return datadir +from tests.test_graphql.common import assert_empty +from tests.test_dkim import no_dkim_file, dkim_file @pytest.fixture @@ -18,7 +14,7 @@ def turned_on(mocker, datadir): mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") assert read_json(datadir / "turned_on.json")["autoUpgrade"]["enable"] == True assert read_json(datadir / "turned_on.json")["autoUpgrade"]["allowReboot"] == True - assert read_json(datadir / "turned_on.json")["timezone"] == "Europe/Moscow" + assert read_json(datadir / "turned_on.json")["timezone"] == "Etc/UTC" return datadir @@ -27,7 +23,7 @@ def turned_off(mocker, datadir): mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") assert read_json(datadir / "turned_off.json")["autoUpgrade"]["enable"] == False assert 
read_json(datadir / "turned_off.json")["autoUpgrade"]["allowReboot"] == False - assert read_json(datadir / "turned_off.json")["timezone"] == "Europe/Moscow" + assert read_json(datadir / "turned_off.json")["timezone"] == "Etc/UTC" return datadir @@ -144,8 +140,7 @@ def test_graphql_get_python_version_wrong_auth( "query": generate_system_query([API_PYTHON_VERSION_INFO]), }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_get_python_version(authorized_client, mock_subprocess_check_output): @@ -181,8 +176,7 @@ def test_graphql_get_system_version_unauthorized( }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) assert mock_subprocess_check_output.call_count == 0 @@ -251,7 +245,7 @@ def is_dns_record_in_array(records, dns_record) -> bool: def test_graphql_get_domain( - authorized_client, domain_file, mock_get_ip4, mock_get_ip6, turned_on, mock_dkim_key + authorized_client, mock_get_ip4, mock_get_ip6, turned_on, mock_dkim_key ): """Test get domain""" response = authorized_client.post( @@ -262,7 +256,9 @@ def test_graphql_get_domain( ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["system"]["domainInfo"]["domain"] == "test.tld" + assert ( + response.json()["data"]["system"]["domainInfo"]["domain"] == "test-domain.tld" + ) assert ( response.json()["data"]["system"]["domainInfo"]["hostname"] == "test-instance" ) @@ -333,6 +329,28 @@ def test_graphql_get_domain( ) +def test_graphql_get_domain_no_dkim( + authorized_client, + mock_get_ip4, + mock_get_ip6, + no_dkim_file, + turned_on, +): + """Test no DKIM file situation gets properly handled""" + response = authorized_client.post( + "/graphql", + json={ + "query": generate_system_query([API_GET_DOMAIN_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + dns_records = response.json()["data"]["system"]["domainInfo"]["requiredDnsRecords"] + for record in dns_records: + if record["name"] == "selector._domainkey": + raise ValueError("unexpected record found:", record) + + API_GET_TIMEZONE = """ settings { timezone @@ -348,8 +366,7 @@ def test_graphql_get_timezone_unauthorized(client, turned_on): "query": generate_system_query([API_GET_TIMEZONE]), }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_get_timezone(authorized_client, turned_on): @@ -362,7 +379,7 @@ def test_graphql_get_timezone(authorized_client, turned_on): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert response.json()["data"]["system"]["settings"]["timezone"] == "Europe/Moscow" + assert response.json()["data"]["system"]["settings"]["timezone"] == "Etc/UTC" def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): @@ -375,9 +392,7 @@ def test_graphql_get_timezone_on_undefined(authorized_client, undefined_config): ) assert response.status_code == 200 assert response.json().get("data") is not None - assert ( - response.json()["data"]["system"]["settings"]["timezone"] == "Europe/Uzhgorod" - ) + assert response.json()["data"]["system"]["settings"]["timezone"] == "Etc/UTC" API_CHANGE_TIMEZONE_MUTATION = """ @@ -401,12 +416,11 @@ def test_graphql_change_timezone_unauthorized(client, turned_on): json={ "query": API_CHANGE_TIMEZONE_MUTATION, "variables": { - "timezone": "Europe/Moscow", + "timezone": 
"Etc/UTC", }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_change_timezone(authorized_client, turned_on): @@ -474,7 +488,7 @@ def test_graphql_change_timezone_without_timezone(authorized_client, turned_on): assert response.json()["data"]["system"]["changeTimezone"]["message"] is not None assert response.json()["data"]["system"]["changeTimezone"]["code"] == 400 assert response.json()["data"]["system"]["changeTimezone"]["timezone"] is None - assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" + assert read_json(turned_on / "turned_on.json")["timezone"] == "Etc/UTC" def test_graphql_change_timezone_with_invalid_timezone(authorized_client, turned_on): @@ -494,7 +508,7 @@ def test_graphql_change_timezone_with_invalid_timezone(authorized_client, turned assert response.json()["data"]["system"]["changeTimezone"]["message"] is not None assert response.json()["data"]["system"]["changeTimezone"]["code"] == 400 assert response.json()["data"]["system"]["changeTimezone"]["timezone"] is None - assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" + assert read_json(turned_on / "turned_on.json")["timezone"] == "Etc/UTC" API_GET_AUTO_UPGRADE_SETTINGS_QUERY = """ @@ -515,8 +529,7 @@ def test_graphql_get_auto_upgrade_unauthorized(client, turned_on): "query": generate_system_query([API_GET_AUTO_UPGRADE_SETTINGS_QUERY]), }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_get_auto_upgrade(authorized_client, turned_on): @@ -624,8 +637,7 @@ def test_graphql_change_auto_upgrade_unauthorized(client, turned_on): }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_change_auto_upgrade(authorized_client, turned_on): @@ -932,8 +944,7 @@ def test_graphql_pull_system_configuration_unauthorized(client, mock_subprocess_ }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) assert mock_subprocess_popen.call_count == 0 diff --git a/tests/test_graphql/test_system/no_values.json b/tests/test_graphql/test_system/no_values.json index 779691f..954790c 100644 --- a/tests/test_graphql/test_system/no_values.json +++ b/tests/test_graphql/test_system/no_values.json @@ -1,55 +1,62 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": {}, + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } }, - "resticPassword": "PASS", + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" 
+ } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_system/turned_off.json b/tests/test_graphql/test_system/turned_off.json index 5fc287c..1453366 100644 --- a/tests/test_graphql/test_system/turned_off.json +++ b/tests/test_graphql/test_system/turned_off.json @@ -1,57 +1,65 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": { + "enable": false, + "allowReboot": false }, - "resticPassword": "PASS", + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": false, - "allowReboot": false - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_system/turned_on.json b/tests/test_graphql/test_system/turned_on.json index c6b758b..2f31047 100644 --- a/tests/test_graphql/test_system/turned_on.json +++ b/tests/test_graphql/test_system/turned_on.json @@ -1,60 +1,65 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "resticPassword": "PASS", + 
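# NOTE (annotation, not part of the JSON fixture): this is the same userdata schema
# migration applied to every fixture in this changeset -- per-service toggles move under
# "modules" (each with an optional "location" naming a volume), a "volumes" list now
# describes the block devices, and legacy keys such as "api", "resticPassword",
# "databasePassword" and "backup" are dropped.
+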
"modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": true - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "jitsi": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_system/undefined.json b/tests/test_graphql/test_system/undefined.json index 2e31fea..89ad3ff 100644 --- a/tests/test_graphql/test_system/undefined.json +++ b/tests/test_graphql/test_system/undefined.json @@ -1,52 +1,60 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": true + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } }, - "resticPassword": "PASS", + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_system_nixos_tasks.py b/tests/test_graphql/test_system_nixos_tasks.py index b292fda..4a750c4 100644 --- a/tests/test_graphql/test_system_nixos_tasks.py +++ b/tests/test_graphql/test_system_nixos_tasks.py @@ -4,12 +4,6 @@ import pytest -@pytest.fixture -def domain_file(mocker, datadir): - mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", datadir / "domain") - return datadir - - class ProcessMock: """Mock subprocess.Popen""" @@ -23,15 +17,6 @@ class ProcessMock: returncode = 0 -class BrokenServiceMock(ProcessMock): - 
"""Mock subprocess.Popen for broken service""" - - def communicate(): # pylint: disable=no-method-argument - return (b"Testing error", None) - - returncode = 3 - - @pytest.fixture def mock_subprocess_popen(mocker): mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) diff --git a/tests/test_graphql/test_users.py b/tests/test_graphql/test_users.py index 9554195..5c6e7e4 100644 --- a/tests/test_graphql/test_users.py +++ b/tests/test_graphql/test_users.py @@ -6,6 +6,13 @@ from tests.common import ( generate_users_query, read_json, ) +from selfprivacy_api.utils import WriteUserData +from tests.test_graphql.common import ( + assert_empty, + assert_errorcode, + assert_ok, + get_data, +) invalid_usernames = [ "messagebus", @@ -88,6 +95,15 @@ def undefined_settings(mocker, datadir): return datadir +@pytest.fixture +def no_users_no_admin_nobody(undefined_settings): + datadir = undefined_settings + with WriteUserData() as data: + del data["username"] + del data["sshKeys"] + return datadir + + class ProcessMock: """Mock subprocess.Popen""" @@ -117,6 +133,17 @@ allUsers { """ +def api_all_users(authorized_client): + response = authorized_client.post( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + output = get_data(response)["users"]["allUsers"] + return output + + def test_graphql_get_users_unauthorized(client, some_users, mock_subprocess_popen): """Test wrong auth""" response = client.post( @@ -125,8 +152,7 @@ def test_graphql_get_users_unauthorized(client, some_users, mock_subprocess_pope "query": generate_users_query([API_USERS_INFO]), }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_get_some_users(authorized_client, some_users, mock_subprocess_popen): @@ -170,6 +196,38 @@ def test_graphql_get_no_users(authorized_client, no_users, mock_subprocess_popen ] +def test_graphql_get_users_undefined_but_admin(authorized_client, undefined_settings): + response = authorized_client.post( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert len(response.json()["data"]["users"]["allUsers"]) == 1 + assert response.json()["data"]["users"]["allUsers"][0]["username"] == "tester" + assert response.json()["data"]["users"]["allUsers"][0]["sshKeys"] == [ + "ssh-rsa KEY test@pc" + ] + + +def test_graphql_get_users_undefined_no_admin( + authorized_client, no_users_no_admin_nobody +): + response = authorized_client.post( + "/graphql", + json={ + "query": generate_users_query([API_USERS_INFO]), + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert len(response.json()["data"]["users"]["allUsers"]) == 0 + + API_GET_USERS = """ query TestUsers($username: String!) 
{ users { @@ -192,12 +250,10 @@ def test_graphql_get_one_user_unauthorized(client, one_user, mock_subprocess_pop }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( "/graphql", json={ @@ -217,6 +273,22 @@ def test_graphql_get_one_user(authorized_client, one_user, mock_subprocess_popen ] +def test_graphql_get_some_user_undefined(authorized_client, undefined_settings): + response = authorized_client.post( + "/graphql", + json={ + "query": API_GET_USERS, + "variables": { + "username": "user1", + }, + }, + ) + assert response.status_code == 200 + assert response.json().get("data") is not None + + assert response.json()["data"]["users"]["getUser"] is None + + def test_graphql_get_some_user(authorized_client, some_users, mock_subprocess_popen): response = authorized_client.post( "/graphql", @@ -310,249 +382,135 @@ mutation createUser($user: UserMutationInput!) { """ -def test_graphql_add_user_unauthorize(client, one_user, mock_subprocess_popen): - response = client.post( +def api_add_user_json(authorized_client, user_json: dict): + # lowlevel for deeper testing of edgecases + return authorized_client.post( "/graphql", json={ "query": API_CREATE_USERS_MUTATION, "variables": { - "user": { - "username": "user2", - "password": "12345678", - }, + "user": user_json, }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + + +def api_add_user(authorized_client, username, password): + response = api_add_user_json( + authorized_client, {"username": username, "password": password} + ) + output = get_data(response)["users"]["createUser"] + return output + + +def test_graphql_add_user_unauthorized(client, one_user, mock_subprocess_popen): + response = api_add_user_json(client, {"username": "user2", "password": "12345678"}) + assert_empty(response) def test_graphql_add_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "user2", - "password": "12345678", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None + output = api_add_user(authorized_client, "user2", password="12345678") + assert_ok(output, code=201) - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 201 - assert response.json()["data"]["users"]["createUser"]["success"] is True - - assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user2" - assert response.json()["data"]["users"]["createUser"]["user"]["sshKeys"] == [] + assert output["user"]["username"] == "user2" + assert output["user"]["sshKeys"] == [] -def test_graphql_add_undefined_settings( +def test_graphql_add_user_when_undefined_settings( authorized_client, undefined_settings, mock_subprocess_popen ): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "user2", - "password": "12345678", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None + output = api_add_user(authorized_client, "user2", password="12345678") + assert_ok(output, code=201) - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert 
response.json()["data"]["users"]["createUser"]["code"] == 201 - assert response.json()["data"]["users"]["createUser"]["success"] is True - - assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user2" - assert response.json()["data"]["users"]["createUser"]["user"]["sshKeys"] == [] + assert output["user"]["username"] == "user2" + assert output["user"]["sshKeys"] == [] -def test_graphql_add_without_password( - authorized_client, one_user, mock_subprocess_popen -): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "user2", - "password": "", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None - - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 400 - assert response.json()["data"]["users"]["createUser"]["success"] is False - - assert response.json()["data"]["users"]["createUser"]["user"] is None +users_witn_empty_fields = [ + {"username": "user2", "password": ""}, + {"username": "", "password": "12345678"}, + {"username": "", "password": ""}, +] -def test_graphql_add_without_both(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "", - "password": "", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None +@pytest.mark.parametrize("user_json", users_witn_empty_fields) +def test_graphql_add_with_empty_fields(authorized_client, one_user, user_json): + response = api_add_user_json(authorized_client, user_json) + output = get_data(response)["users"]["createUser"] - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 400 - assert response.json()["data"]["users"]["createUser"]["success"] is False + assert_errorcode(output, 400) + assert output["user"] is None - assert response.json()["data"]["users"]["createUser"]["user"] is None + +users_witn_undefined_fields = [ + {"username": "user2"}, + {"password": "12345678"}, + {}, +] + + +@pytest.mark.parametrize("user_json", users_witn_undefined_fields) +def test_graphql_add_with_undefined_fields(authorized_client, one_user, user_json): + # checking that all fields are mandatory + response = api_add_user_json(authorized_client, user_json) + + assert response.json()["errors"] is not None + assert response.json()["errors"] != [] @pytest.mark.parametrize("username", invalid_usernames) def test_graphql_add_system_username( authorized_client, one_user, mock_subprocess_popen, username ): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": username, - "password": "12345678", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None + output = api_add_user(authorized_client, username, password="12345678") - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 409 - assert response.json()["data"]["users"]["createUser"]["success"] is False - - assert response.json()["data"]["users"]["createUser"]["user"] is None + assert_errorcode(output, code=409) + assert output["user"] is None -def 
test_graphql_add_existing_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "user1", - "password": "12345678", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None +def test_graphql_add_existing_user(authorized_client, one_user): + output = api_add_user(authorized_client, "user1", password="12345678") - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 409 - assert response.json()["data"]["users"]["createUser"]["success"] is False - - assert response.json()["data"]["users"]["createUser"]["user"]["username"] == "user1" - assert ( - response.json()["data"]["users"]["createUser"]["user"]["sshKeys"][0] - == "ssh-rsa KEY user1@pc" - ) + assert_errorcode(output, code=409) + assert output["user"]["username"] == "user1" + assert output["user"]["sshKeys"][0] == "ssh-rsa KEY user1@pc" -def test_graphql_add_main_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "tester", - "password": "12345678", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None +def test_graphql_add_main_user(authorized_client, one_user): + output = api_add_user(authorized_client, "tester", password="12345678") - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 409 - assert response.json()["data"]["users"]["createUser"]["success"] is False + assert_errorcode(output, code=409) + assert output["user"]["username"] == "tester" + assert output["user"]["sshKeys"][0] == "ssh-rsa KEY test@pc" - assert ( - response.json()["data"]["users"]["createUser"]["user"]["username"] == "tester" - ) - assert ( - response.json()["data"]["users"]["createUser"]["user"]["sshKeys"][0] - == "ssh-rsa KEY test@pc" - ) + +def test_graphql_add_user_when_no_admin_defined( + authorized_client, no_users_no_admin_nobody +): + output = api_add_user(authorized_client, "tester", password="12345678") + + assert_errorcode(output, code=400) + assert output["user"] is None def test_graphql_add_long_username(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": "a" * 32, - "password": "12345678", - }, - }, - }, - ) - assert response.json().get("data") is not None + output = api_add_user(authorized_client, "a" * 32, password="12345678") - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 400 - assert response.json()["data"]["users"]["createUser"]["success"] is False - - assert response.json()["data"]["users"]["createUser"]["user"] is None + assert_errorcode(output, code=400) + assert output["user"] is None -@pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "^-^"]) +# TODO: maybe make a username generating function to make a more comprehensive invalid username test +@pytest.mark.parametrize( + "username", ["", "1", "фыр", "user1@", "^-^", "№:%##$^&@$&^()_"] +) def test_graphql_add_invalid_username( authorized_client, one_user, mock_subprocess_popen, 
username ): - response = authorized_client.post( - "/graphql", - json={ - "query": API_CREATE_USERS_MUTATION, - "variables": { - "user": { - "username": username, - "password": "12345678", - }, - }, - }, - ) - assert response.status_code == 200 - assert response.json().get("data") is not None + output = api_add_user(authorized_client, username, password="12345678") - assert response.json()["data"]["users"]["createUser"]["message"] is not None - assert response.json()["data"]["users"]["createUser"]["code"] == 400 - assert response.json()["data"]["users"]["createUser"]["success"] is False - - assert response.json()["data"]["users"]["createUser"]["user"] is None + assert_errorcode(output, code=400) + assert output["user"] is None API_DELETE_USER_MUTATION = """ @@ -576,8 +534,7 @@ def test_graphql_delete_user_unauthorized(client, some_users, mock_subprocess_po "variables": {"username": "user1"}, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_popen): @@ -595,6 +552,11 @@ def test_graphql_delete_user(authorized_client, some_users, mock_subprocess_pope assert response.json()["data"]["users"]["deleteUser"]["message"] is not None assert response.json()["data"]["users"]["deleteUser"]["success"] is True + new_users = api_all_users(authorized_client) + assert len(new_users) == 3 + usernames = [user["username"] for user in new_users] + assert set(usernames) == set(["user2", "user3", "tester"]) + @pytest.mark.parametrize("username", ["", "def"]) def test_graphql_delete_nonexistent_users( @@ -683,8 +645,7 @@ def test_graphql_update_user_unauthorized(client, some_users, mock_subprocess_po }, }, ) - assert response.status_code == 200 - assert response.json().get("data") is None + assert_empty(response) def test_graphql_update_user(authorized_client, some_users, mock_subprocess_popen): diff --git a/tests/test_graphql/test_users/no_users.json b/tests/test_graphql/test_users/no_users.json index a40fb88..2f31047 100644 --- a/tests/test_graphql/test_users/no_users.json +++ b/tests/test_graphql/test_users/no_users.json @@ -1,59 +1,65 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": false + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "users": [], + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "resticPassword": "PASS", + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { 
- "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_graphql/test_users/one_user.json b/tests/test_graphql/test_users/one_user.json index 7e1cced..68f06f8 100644 --- a/tests/test_graphql/test_users/one_user.json +++ b/tests/test_graphql/test_users/one_user.json @@ -1,43 +1,17 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": false + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, + "timezone": "Etc/UTC", "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", + "useBinds": true, "sshKeys": [ "ssh-rsa KEY test@pc" ], @@ -50,17 +24,50 @@ ] } ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "server": { - "provider": "HETZNER" + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] } } diff --git a/tests/test_graphql/test_users/some_users.json b/tests/test_graphql/test_users/some_users.json index c02d216..3ad366e 100644 --- a/tests/test_graphql/test_users/some_users.json +++ b/tests/test_graphql/test_users/some_users.json @@ -1,43 +1,17 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": false + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, + "timezone": "Etc/UTC", "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - 
"autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", + "useBinds": true, "sshKeys": [ "ssh-rsa KEY test@pc" ], @@ -60,17 +34,50 @@ "hashedPassword": "HASHED_PASSWORD_3" } ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "server": { - "provider": "HETZNER" + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], + "ssh": { + "enable": true, + "passwordAuthentication": true, + "rootKeys": [ + "ssh-ed25519 KEY test@pc" + ] } } diff --git a/tests/test_graphql/test_users/undefined.json b/tests/test_graphql/test_users/undefined.json index ae9cd9e..26e3678 100644 --- a/tests/test_graphql/test_users/undefined.json +++ b/tests/test_graphql/test_users/undefined.json @@ -1,57 +1,64 @@ { - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false + "dns": { + "provider": "CLOUDFLARE", + "useStagingACME": false }, - "bitwarden": { - "enable": false + "server": { + "provider": "HETZNER" }, - "databasePassword": "PASSWORD", - "domain": "test.tld", + "domain": "test-domain.tld", "hashedMasterPassword": "HASHED_PASSWORD", "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true + "timezone": "Etc/UTC", + "username": "tester", + "useBinds": true, + "sshKeys": [ + "ssh-rsa KEY test@pc" + ], + "autoUpgrade": { + "enable": true, + "allowReboot": true }, - "resticPassword": "PASS", + "modules": { + "bitwarden": { + "enable": true, + "location": "sdb" + }, + "gitea": { + "enable": true, + "location": "sdb" + }, + "jitsi-meet": { + "enable": true + }, + "nextcloud": { + "enable": true, + "location": "sdb" + }, + "ocserv": { + "enable": true + }, + "pleroma": { + "enable": true, + "location": "sdb" + }, + "simple-nixos-mailserver": { + "enable": true, + "location": "sdb" + } + }, + "volumes": [ + { + "device": "/dev/sdb", + "mountPoint": "/volumes/sdb", + "fsType": "ext4" + } + ], "ssh": { "enable": true, "passwordAuthentication": true, "rootKeys": [ "ssh-ed25519 KEY test@pc" ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" } } diff --git a/tests/test_jobs.py b/tests/test_jobs.py index 0a4271e..64cf457 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -1,6 +1,7 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument import pytest +from time import sleep from selfprivacy_api.jobs import Jobs, JobStatus import selfprivacy_api.jobs as jobsmodule @@ -49,6 +50,20 @@ def test_remove_get_nonexistent(jobs_with_one_job): assert 
jobs_with_one_job.get_job(uid_str) is None +def test_set_zeroing_ttl(jobs_with_one_job): + test_job = jobs_with_one_job.get_jobs()[0] + jobs_with_one_job.set_expiration(test_job, 0) + assert jobs_with_one_job.get_jobs() == [] + + +def test_not_zeroing_ttl(jobs_with_one_job): + test_job = jobs_with_one_job.get_jobs()[0] + jobs_with_one_job.set_expiration(test_job, 1) + assert len(jobs_with_one_job.get_jobs()) == 1 + sleep(1.2) + assert len(jobs_with_one_job.get_jobs()) == 0 + + def test_jobs(jobs_with_one_job): jobs = jobs_with_one_job test_job = jobs_with_one_job.get_jobs()[0] diff --git a/tests/test_graphql/test_localsecret.py b/tests/test_localsecret.py similarity index 100% rename from tests/test_graphql/test_localsecret.py rename to tests/test_localsecret.py diff --git a/tests/test_models.py b/tests/test_models.py index 2263e82..f01bb4f 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -1,18 +1,25 @@ import pytest -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from selfprivacy_api.models.tokens.recovery_key import RecoveryKey from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey -def test_recovery_key_expired(): - expiration = datetime.now() - timedelta(minutes=5) +def test_recovery_key_expired_utcnaive(): + expiration = datetime.utcnow() - timedelta(minutes=5) + key = RecoveryKey.generate(expiration=expiration, uses_left=2) + assert not key.is_valid() + + +def test_recovery_key_expired_tzaware(): + expiration = datetime.now(timezone.utc) - timedelta(minutes=5) key = RecoveryKey.generate(expiration=expiration, uses_left=2) assert not key.is_valid() def test_new_device_key_expired(): - expiration = datetime.now() - timedelta(minutes=5) + # key is supposed to be tzaware + expiration = datetime.now(timezone.utc) - timedelta(minutes=5) key = NewDeviceKey.generate() key.expires_at = expiration assert not key.is_valid() diff --git a/tests/test_graphql/test_repository/test_tokens_repository.py b/tests/test_repository/test_tokens_repository.py similarity index 87% rename from tests/test_graphql/test_repository/test_tokens_repository.py rename to tests/test_repository/test_tokens_repository.py index 020a868..0ffc76b 100644 --- a/tests/test_graphql/test_repository/test_tokens_repository.py +++ b/tests/test_repository/test_tokens_repository.py @@ -2,7 +2,7 @@ # pylint: disable=unused-argument # pylint: disable=missing-function-docstring -from datetime import datetime, timedelta +from datetime import datetime, timezone from mnemonic import Mnemonic import pytest @@ -16,13 +16,15 @@ from selfprivacy_api.repositories.tokens.exceptions import ( TokenNotFound, NewDeviceKeyNotFound, ) -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) + from selfprivacy_api.repositories.tokens.redis_tokens_repository import ( RedisTokensRepository, ) -from tests.common import read_json +from selfprivacy_api.repositories.tokens.abstract_tokens_repository import ( + AbstractTokensRepository, +) + +from tests.common import ten_minutes_into_past, ten_minutes_into_future ORIGINAL_DEVICE_NAMES = [ @@ -32,24 +34,15 @@ ORIGINAL_DEVICE_NAMES = [ "forth_token", ] +TEST_DATE = datetime(2022, 7, 15, 17, 41, 31, 675698, timezone.utc) +# tokens are not tz-aware +TOKEN_TEST_DATE = datetime(2022, 7, 15, 17, 41, 31, 675698) + def mnemonic_from_hex(hexkey): return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey)) -@pytest.fixture -def empty_keys(mocker, datadir): - 
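# NOTE: the empty_keys fixture is removed together with JsonTokensRepository -- later in
# this file the empty_repo fixture is narrowed to the "redis" parameter only, so the JSON
# token backend is no longer exercised by these tests at all.
-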
mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "empty_keys.json") - assert read_json(datadir / "empty_keys.json")["tokens"] == [ - { - "token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", - "name": "primary_token", - "date": "2022-07-15 17:41:31.675698", - } - ] - return datadir - - @pytest.fixture def mock_new_device_key_generate(mocker): mock = mocker.patch( @@ -57,8 +50,8 @@ def mock_new_device_key_generate(mocker): autospec=True, return_value=NewDeviceKey( key="43478d05b35e4781598acd76e33832bb", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, + expires_at=TEST_DATE, ), ) return mock @@ -72,8 +65,8 @@ def mock_new_device_key_generate_for_mnemonic(mocker): autospec=True, return_value=NewDeviceKey( key="2237238de23dc71ab558e317bdb8ff8e", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, + expires_at=TEST_DATE, ), ) return mock @@ -100,7 +93,7 @@ def mock_recovery_key_generate_invalid(mocker): autospec=True, return_value=RecoveryKey( key="889bf49c1d3199d71a2e704718772bd53a422020334db051", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, expires_at=None, uses_left=0, ), @@ -116,7 +109,7 @@ def mock_token_generate(mocker): return_value=Token( token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", device_name="IamNewDevice", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TOKEN_TEST_DATE, ), ) return mock @@ -129,7 +122,7 @@ def mock_recovery_key_generate(mocker): autospec=True, return_value=RecoveryKey( key="889bf49c1d3199d71a2e704718772bd53a422020334db051", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, expires_at=None, uses_left=1, ), @@ -137,27 +130,8 @@ def mock_recovery_key_generate(mocker): return mock -@pytest.fixture -def empty_json_repo(empty_keys): - repo = JsonTokensRepository() - for token in repo.get_tokens(): - repo.delete_token(token) - assert repo.get_tokens() == [] - return repo - - -@pytest.fixture -def empty_redis_repo(): - repo = RedisTokensRepository() - repo.reset() - assert repo.get_tokens() == [] - return repo - - -@pytest.fixture(params=["json", "redis"]) -def empty_repo(request, empty_json_repo, empty_redis_repo): - if request.param == "json": - return empty_json_repo +@pytest.fixture(params=["redis"]) +def empty_repo(request, empty_redis_repo): if request.param == "redis": return empty_redis_repo # return empty_json_repo @@ -250,13 +224,13 @@ def test_create_token(empty_repo, mock_token_generate): assert repo.create_token(device_name="IamNewDevice") == Token( token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", device_name="IamNewDevice", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TOKEN_TEST_DATE, ) assert repo.get_tokens() == [ Token( token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", device_name="IamNewDevice", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TOKEN_TEST_DATE, ) ] @@ -292,7 +266,7 @@ def test_delete_not_found_token(some_tokens_repo): input_token = Token( token="imbadtoken", device_name="primary_token", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, ) with pytest.raises(TokenNotFound): assert repo.delete_token(input_token) is None @@ -321,7 +295,7 @@ def test_refresh_not_found_token(some_tokens_repo, mock_token_generate): input_token = Token( token="idontknowwhoiam", device_name="tellmewhoiam?", - 
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, ) with pytest.raises(TokenNotFound): @@ -345,7 +319,7 @@ def test_create_get_recovery_key(some_tokens_repo, mock_recovery_key_generate): assert repo.create_recovery_key(uses_left=1, expiration=None) is not None assert repo.get_recovery_key() == RecoveryKey( key="889bf49c1d3199d71a2e704718772bd53a422020334db051", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, expires_at=None, uses_left=1, ) @@ -384,10 +358,13 @@ def test_use_mnemonic_expired_recovery_key( some_tokens_repo, ): repo = some_tokens_repo - expiration = datetime.now() - timedelta(minutes=5) + expiration = ten_minutes_into_past() assert repo.create_recovery_key(uses_left=2, expiration=expiration) is not None recovery_key = repo.get_recovery_key() - assert recovery_key.expires_at == expiration + # TODO: do not ignore timezone once json backend is deleted + assert recovery_key.expires_at.replace(tzinfo=None) == expiration.replace( + tzinfo=None + ) assert not repo.is_recovery_key_valid() with pytest.raises(RecoveryKeyNotFound): @@ -484,8 +461,8 @@ def test_get_new_device_key(some_tokens_repo, mock_new_device_key_generate): assert repo.get_new_device_key() == NewDeviceKey( key="43478d05b35e4781598acd76e33832bb", - created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), - expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698), + created_at=TEST_DATE, + expires_at=TEST_DATE, ) @@ -561,7 +538,7 @@ def test_use_mnemonic_expired_new_device_key( some_tokens_repo, ): repo = some_tokens_repo - expiration = datetime.now() - timedelta(minutes=5) + expiration = ten_minutes_into_past() key = repo.get_new_device_key() assert key is not None @@ -588,3 +565,15 @@ def test_use_mnemonic_new_device_key_when_empty(empty_repo): ) is None ) + + +def assert_identical( + repo_a: AbstractTokensRepository, repo_b: AbstractTokensRepository +): + tokens_a = repo_a.get_tokens() + tokens_b = repo_b.get_tokens() + assert len(tokens_a) == len(tokens_b) + for token in tokens_a: + assert token in tokens_b + assert repo_a.get_recovery_key() == repo_b.get_recovery_key() + assert repo_a._get_stored_new_device_key() == repo_b._get_stored_new_device_key() diff --git a/tests/test_rest_endpoints/data/jobs.json b/tests/test_rest_endpoints/data/jobs.json deleted file mode 100644 index 0967ef4..0000000 --- a/tests/test_rest_endpoints/data/jobs.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tests/test_rest_endpoints/data/tokens.json b/tests/test_rest_endpoints/data/tokens.json deleted file mode 100644 index 9be9d02..0000000 --- a/tests/test_rest_endpoints/data/tokens.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314" - }, - { - "token": "TEST_TOKEN2", - "name": "test_token2", - "date": "2022-01-14 08:31:10.789314" - } - ] -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/data/tokens.json b/tests/test_rest_endpoints/services/data/tokens.json deleted file mode 100644 index 9d35420..0000000 --- a/tests/test_rest_endpoints/services/data/tokens.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "Test Token", - "date": "2022-01-14 08:31:10.789314" - } - ] -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden.py b/tests/test_rest_endpoints/services/test_bitwarden.py deleted file mode 100644 index 3977253..0000000 --- a/tests/test_rest_endpoints/services/test_bitwarden.py +++ 
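# NOTE: from here on the changeset deletes the per-service REST toggle tests
# (test_bitwarden.py, test_gitea.py, ...) together with their JSON fixtures -- presumably
# these REST endpoints are retired in favour of the GraphQL services interface covered
# under tests/test_graphql/.
+++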
/dev/null @@ -1,125 +0,0 @@ -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r") as f: - return json.load(f) - - -############################################################################### - - -@pytest.fixture -def bitwarden_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert read_json(datadir / "turned_off.json")["bitwarden"]["enable"] == False - return datadir - - -@pytest.fixture -def bitwarden_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_on.json")["bitwarden"]["enable"] == True - return datadir - - -@pytest.fixture -def bitwarden_enable_undefined(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json" - ) - assert "enable" not in read_json(datadir / "enable_undefined.json")["bitwarden"] - return datadir - - -@pytest.fixture -def bitwarden_undefined(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "bitwarden" not in read_json(datadir / "undefined.json") - return datadir - - -############################################################################### - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_unauthorized(client, bitwarden_off, endpoint): - response = client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 401 - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_illegal_methods(authorized_client, bitwarden_off, endpoint): - response = authorized_client.get(f"/services/bitwarden/{endpoint}") - assert response.status_code == 405 - response = authorized_client.put(f"/services/bitwarden/{endpoint}") - assert response.status_code == 405 - response = authorized_client.delete(f"/services/bitwarden/{endpoint}") - assert response.status_code == 405 - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_off(authorized_client, bitwarden_off, endpoint, target_file): - response = authorized_client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 200 - assert read_json(bitwarden_off / "turned_off.json") == read_json( - bitwarden_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_on(authorized_client, bitwarden_on, endpoint, target_file): - response = authorized_client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 200 - assert read_json(bitwarden_on / "turned_on.json") == read_json( - bitwarden_on / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_twice(authorized_client, bitwarden_off, endpoint, target_file): - response = authorized_client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 200 - response = authorized_client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 200 - assert read_json(bitwarden_off / "turned_off.json") == read_json( - bitwarden_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_attribute_deleted( - authorized_client, bitwarden_enable_undefined, endpoint, 
target_file -): - response = authorized_client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 200 - assert read_json(bitwarden_enable_undefined / "enable_undefined.json") == read_json( - bitwarden_enable_undefined / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_bitwarden_undefined( - authorized_client, bitwarden_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/bitwarden/{endpoint}") - assert response.status_code == 200 - assert read_json(bitwarden_undefined / "undefined.json") == read_json( - bitwarden_undefined / target_file - ) diff --git a/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json deleted file mode 100644 index 1a95e85..0000000 --- a/tests/test_rest_endpoints/services/test_bitwarden/enable_undefined.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json deleted file mode 100644 index c1691ea..0000000 --- a/tests/test_rest_endpoints/services/test_bitwarden/turned_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json b/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json deleted file mode 100644 index 42999d8..0000000 --- 
a/tests/test_rest_endpoints/services/test_bitwarden/turned_on.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": true - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_bitwarden/undefined.json b/tests/test_rest_endpoints/services/test_bitwarden/undefined.json deleted file mode 100644 index ee288c2..0000000 --- a/tests/test_rest_endpoints/services/test_bitwarden/undefined.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea.py b/tests/test_rest_endpoints/services/test_gitea.py deleted file mode 100644 index 0a50c19..0000000 --- a/tests/test_rest_endpoints/services/test_gitea.py +++ /dev/null @@ -1,121 +0,0 @@ -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r") as f: - return json.load(f) - - -############################################################################### - - -@pytest.fixture -def gitea_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert read_json(datadir / "turned_off.json")["gitea"]["enable"] == False - return datadir - - -@pytest.fixture -def gitea_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_on.json")["gitea"]["enable"] == True - return datadir - - -@pytest.fixture -def gitea_enable_undefined(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json" - ) - assert "enable" not in read_json(datadir / "enable_undefined.json")["gitea"] - return datadir - - 
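
Each service suite being deleted here (bitwarden above, gitea here, and nextcloud, ocserv, and pleroma below) repeats the same four fixtures: patch `selfprivacy_api.utils.USERDATA_FILE` onto a pytest-datadir JSON file, assert the precondition the fixture name promises, and return the directory. For the record, one way this duplication could have been condensed; `read_json` and the patch target are verbatim from the suites, while the `make_state_fixture` factory is invented for illustration:

```python
# Illustrative condensation of the deleted per-service fixtures.
import json

import pytest


def read_json(file_path):
    with open(file_path, "r") as f:
        return json.load(f)


def make_state_fixture(service: str, filename: str, enabled: bool, name: str):
    """Fixture factory: pin USERDATA_FILE to one canned settings file."""

    @pytest.fixture(name=name)
    def _fixture(mocker, datadir):
        # pytest-datadir copies the data dir into a per-test tmp dir, so
        # mutations made through the API cannot leak between tests.
        mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / filename)
        # Sanity-check the fixture file before the test mutates it.
        assert read_json(datadir / filename)[service]["enable"] is enabled
        return datadir

    return _fixture


gitea_off = make_state_fixture("gitea", "turned_off.json", False, name="gitea_off")
gitea_on = make_state_fixture("gitea", "turned_on.json", True, name="gitea_on")
```

The `enable_undefined` and `undefined` variants differ only in asserting key absence rather than a boolean, and would need a second factory or a flag.
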
-@pytest.fixture -def gitea_undefined(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "gitea" not in read_json(datadir / "undefined.json") - return datadir - - -############################################################################### - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_unauthorized(client, gitea_off, endpoint): - response = client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 401 - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_illegal_methods(authorized_client, gitea_off, endpoint): - response = authorized_client.get(f"/services/gitea/{endpoint}") - assert response.status_code == 405 - response = authorized_client.put(f"/services/gitea/{endpoint}") - assert response.status_code == 405 - response = authorized_client.delete(f"/services/gitea/{endpoint}") - assert response.status_code == 405 - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_off(authorized_client, gitea_off, endpoint, target_file): - response = authorized_client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 200 - assert read_json(gitea_off / "turned_off.json") == read_json( - gitea_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_on(authorized_client, gitea_on, endpoint, target_file): - response = authorized_client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 200 - assert read_json(gitea_on / "turned_on.json") == read_json(gitea_on / target_file) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_twice(authorized_client, gitea_off, endpoint, target_file): - response = authorized_client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 200 - response = authorized_client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 200 - assert read_json(gitea_off / "turned_off.json") == read_json( - gitea_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_attribute_deleted( - authorized_client, gitea_enable_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 200 - assert read_json(gitea_enable_undefined / "enable_undefined.json") == read_json( - gitea_enable_undefined / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_gitea_undefined(authorized_client, gitea_undefined, endpoint, target_file): - response = authorized_client.post(f"/services/gitea/{endpoint}") - assert response.status_code == 200 - assert read_json(gitea_undefined / "undefined.json") == read_json( - gitea_undefined / target_file - ) diff --git a/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json b/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json deleted file mode 100644 index f9fb878..0000000 --- a/tests/test_rest_endpoints/services/test_gitea/enable_undefined.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false 
- }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/turned_off.json b/tests/test_rest_endpoints/services/test_gitea/turned_off.json deleted file mode 100644 index c1691ea..0000000 --- a/tests/test_rest_endpoints/services/test_gitea/turned_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/turned_on.json b/tests/test_rest_endpoints/services/test_gitea/turned_on.json deleted file mode 100644 index f9a1eaf..0000000 --- a/tests/test_rest_endpoints/services/test_gitea/turned_on.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": true - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_gitea/undefined.json 
b/tests/test_rest_endpoints/services/test_gitea/undefined.json deleted file mode 100644 index a50a070..0000000 --- a/tests/test_rest_endpoints/services/test_gitea/undefined.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_mailserver.py b/tests/test_rest_endpoints/services/test_mailserver.py deleted file mode 100644 index 2803683..0000000 --- a/tests/test_rest_endpoints/services/test_mailserver.py +++ /dev/null @@ -1,102 +0,0 @@ -import base64 -import json -import pytest - -from selfprivacy_api.utils import get_dkim_key - -############################################################################### - - -class ProcessMock: - """Mock subprocess.Popen""" - - def __init__(self, args, **kwargs): - self.args = args - self.kwargs = kwargs - - def communicate(): - return ( - b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector for example.com\n', - None, - ) - - -class NoFileMock(ProcessMock): - def communicate(): - return (b"", None) - - -@pytest.fixture -def mock_subproccess_popen(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) - mocker.patch( - "selfprivacy_api.rest.services.get_domain", - autospec=True, - return_value="example.com", - ) - mocker.patch("os.path.exists", autospec=True, return_value=True) - return mock - - -@pytest.fixture -def mock_no_file(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=NoFileMock) - mocker.patch( - "selfprivacy_api.rest.services.get_domain", - autospec=True, - return_value="example.com", - ) - mocker.patch("os.path.exists", autospec=True, return_value=False) - return mock - - -############################################################################### - - -def test_unauthorized(client, mock_subproccess_popen): - """Test unauthorized""" - response = client.get("/services/mailserver/dkim") - assert response.status_code == 401 - - -def test_illegal_methods(authorized_client, mock_subproccess_popen): - response = authorized_client.post("/services/mailserver/dkim") - assert response.status_code == 405 - response = authorized_client.put("/services/mailserver/dkim") - assert response.status_code == 405 - response = authorized_client.delete("/services/mailserver/dkim") - assert response.status_code == 405 - - -def 
test_get_dkim_key(mock_subproccess_popen): - """Test DKIM key""" - dkim_key = get_dkim_key("example.com") - assert ( - dkim_key - == "v=DKIM1; k=rsa; p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" - ) - assert mock_subproccess_popen.call_args[0][0] == [ - "cat", - "/var/dkim/example.com.selector.txt", - ] - - -def test_dkim_key(authorized_client, mock_subproccess_popen): - """Test old REST DKIM key endpoint""" - response = authorized_client.get("/services/mailserver/dkim") - assert response.status_code == 200 - assert ( - base64.b64decode(response.text) - == b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector for example.com\n' - ) - assert mock_subproccess_popen.call_args[0][0] == [ - "cat", - "/var/dkim/example.com.selector.txt", - ] - - -def test_no_dkim_key(authorized_client, mock_no_file): - """Test no DKIM key""" - response = authorized_client.get("/services/mailserver/dkim") - assert response.status_code == 404 - assert mock_no_file.called == False diff --git a/tests/test_rest_endpoints/services/test_nextcloud.py b/tests/test_rest_endpoints/services/test_nextcloud.py deleted file mode 100644 index b05c363..0000000 --- a/tests/test_rest_endpoints/services/test_nextcloud.py +++ /dev/null @@ -1,123 +0,0 @@ -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r") as f: - return json.load(f) - - -############################################################################### - - -@pytest.fixture -def nextcloud_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert read_json(datadir / "turned_off.json")["nextcloud"]["enable"] == False - return datadir - - -@pytest.fixture -def nextcloud_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_on.json")["nextcloud"]["enable"] == True - return datadir - - -@pytest.fixture -def nextcloud_enable_undefined(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json" - ) - assert "enable" not in read_json(datadir / "enable_undefined.json")["nextcloud"] - return datadir - - -@pytest.fixture -def nextcloud_undefined(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "nextcloud" not in read_json(datadir / "undefined.json") - return datadir - - -############################################################################### - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_unauthorized(client, nextcloud_off, endpoint): - response = client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 401 - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_illegal_methods(authorized_client, nextcloud_off, endpoint): - response = authorized_client.get(f"/services/nextcloud/{endpoint}") - assert response.status_code == 405 - response = authorized_client.put(f"/services/nextcloud/{endpoint}") - assert response.status_code == 405 - response = 
authorized_client.delete(f"/services/nextcloud/{endpoint}") - assert response.status_code == 405 - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_off(authorized_client, nextcloud_off, endpoint, target_file): - response = authorized_client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 200 - assert read_json(nextcloud_off / "turned_off.json") == read_json( - nextcloud_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_on(authorized_client, nextcloud_on, endpoint, target_file): - response = authorized_client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 200 - assert read_json(nextcloud_on / "turned_on.json") == read_json( - nextcloud_on / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_twice(authorized_client, nextcloud_off, endpoint, target_file): - response = authorized_client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 200 - response = authorized_client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 200 - assert read_json(nextcloud_off / "turned_off.json") == read_json( - nextcloud_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_attribute_deleted( - authorized_client, nextcloud_enable_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 200 - assert read_json(nextcloud_enable_undefined / "enable_undefined.json") == read_json( - nextcloud_enable_undefined / target_file - ) - - -@pytest.mark.parametrize("endpoint,target", [("enable", True), ("disable", False)]) -def test_on_nextcloud_undefined( - authorized_client, nextcloud_undefined, endpoint, target -): - response = authorized_client.post(f"/services/nextcloud/{endpoint}") - assert response.status_code == 200 - assert ( - read_json(nextcloud_undefined / "undefined.json")["nextcloud"]["enable"] - == target - ) diff --git a/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json deleted file mode 100644 index 19f1f2d..0000000 --- a/tests/test_rest_endpoints/services/test_nextcloud/enable_undefined.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN" - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - 
"accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json deleted file mode 100644 index b80ad9e..0000000 --- a/tests/test_rest_endpoints/services/test_nextcloud/turned_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json b/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json deleted file mode 100644 index c1691ea..0000000 --- a/tests/test_rest_endpoints/services/test_nextcloud/turned_on.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_nextcloud/undefined.json b/tests/test_rest_endpoints/services/test_nextcloud/undefined.json deleted file mode 100644 index 46c09f3..0000000 --- a/tests/test_rest_endpoints/services/test_nextcloud/undefined.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": 
"Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv.py b/tests/test_rest_endpoints/services/test_ocserv.py deleted file mode 100644 index 8f43e70..0000000 --- a/tests/test_rest_endpoints/services/test_ocserv.py +++ /dev/null @@ -1,123 +0,0 @@ -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r") as f: - return json.load(f) - - -############################################################################### - - -@pytest.fixture -def ocserv_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert read_json(datadir / "turned_off.json")["ocserv"]["enable"] == False - return datadir - - -@pytest.fixture -def ocserv_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_on.json")["ocserv"]["enable"] == True - return datadir - - -@pytest.fixture -def ocserv_enable_undefined(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json" - ) - assert "enable" not in read_json(datadir / "enable_undefined.json")["ocserv"] - return datadir - - -@pytest.fixture -def ocserv_undefined(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "ocserv" not in read_json(datadir / "undefined.json") - return datadir - - -############################################################################### - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_unauthorized(client, ocserv_off, endpoint): - response = client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 401 - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_illegal_methods(authorized_client, ocserv_off, endpoint): - response = authorized_client.get(f"/services/ocserv/{endpoint}") - assert response.status_code == 405 - response = authorized_client.put(f"/services/ocserv/{endpoint}") - assert response.status_code == 405 - response = authorized_client.delete(f"/services/ocserv/{endpoint}") - assert response.status_code == 405 - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_off(authorized_client, ocserv_off, endpoint, target_file): - response = authorized_client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 200 - assert read_json(ocserv_off / "turned_off.json") == read_json( - ocserv_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_on(authorized_client, ocserv_on, endpoint, target_file): - response = authorized_client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 200 - assert read_json(ocserv_on / "turned_on.json") == read_json(ocserv_on / target_file) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_twice(authorized_client, ocserv_off, endpoint, target_file): - response = authorized_client.post(f"/services/ocserv/{endpoint}") - 
assert response.status_code == 200 - response = authorized_client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 200 - assert read_json(ocserv_off / "turned_off.json") == read_json( - ocserv_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_attribute_deleted( - authorized_client, ocserv_enable_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 200 - assert read_json(ocserv_enable_undefined / "enable_undefined.json") == read_json( - ocserv_enable_undefined / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_ocserv_undefined( - authorized_client, ocserv_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/ocserv/{endpoint}") - assert response.status_code == 200 - assert read_json(ocserv_undefined / "undefined.json") == read_json( - ocserv_undefined / target_file - ) diff --git a/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json b/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json deleted file mode 100644 index e080110..0000000 --- a/tests/test_rest_endpoints/services/test_ocserv/enable_undefined.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/turned_off.json b/tests/test_rest_endpoints/services/test_ocserv/turned_off.json deleted file mode 100644 index 1c08123..0000000 --- a/tests/test_rest_endpoints/services/test_ocserv/turned_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": false - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - 
"server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/turned_on.json b/tests/test_rest_endpoints/services/test_ocserv/turned_on.json deleted file mode 100644 index b80ad9e..0000000 --- a/tests/test_rest_endpoints/services/test_ocserv/turned_on.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ocserv/undefined.json b/tests/test_rest_endpoints/services/test_ocserv/undefined.json deleted file mode 100644 index 12eb73a..0000000 --- a/tests/test_rest_endpoints/services/test_ocserv/undefined.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma.py b/tests/test_rest_endpoints/services/test_pleroma.py deleted file mode 100644 index 0d7f149..0000000 --- a/tests/test_rest_endpoints/services/test_pleroma.py +++ /dev/null @@ -1,125 +0,0 @@ -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r") as f: - return json.load(f) - - -############################################################################### - - -@pytest.fixture -def pleroma_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert read_json(datadir / "turned_off.json")["pleroma"]["enable"] == False - return datadir - - -@pytest.fixture -def pleroma_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", 
new=datadir / "turned_on.json") - assert read_json(datadir / "turned_on.json")["pleroma"]["enable"] == True - return datadir - - -@pytest.fixture -def pleroma_enable_undefined(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "enable_undefined.json" - ) - assert "enable" not in read_json(datadir / "enable_undefined.json")["pleroma"] - return datadir - - -@pytest.fixture -def pleroma_undefined(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "pleroma" not in read_json(datadir / "undefined.json") - return datadir - - -############################################################################### - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_unauthorized(client, pleroma_off, endpoint): - response = client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 401 - - -@pytest.mark.parametrize("endpoint", ["enable", "disable"]) -def test_illegal_methods(authorized_client, pleroma_off, endpoint): - response = authorized_client.get(f"/services/pleroma/{endpoint}") - assert response.status_code == 405 - response = authorized_client.put(f"/services/pleroma/{endpoint}") - assert response.status_code == 405 - response = authorized_client.delete(f"/services/pleroma/{endpoint}") - assert response.status_code == 405 - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_off(authorized_client, pleroma_off, endpoint, target_file): - response = authorized_client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 200 - assert read_json(pleroma_off / "turned_off.json") == read_json( - pleroma_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_from_on(authorized_client, pleroma_on, endpoint, target_file): - response = authorized_client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 200 - assert read_json(pleroma_on / "turned_on.json") == read_json( - pleroma_on / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_switch_twice(authorized_client, pleroma_off, endpoint, target_file): - response = authorized_client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 200 - response = authorized_client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 200 - assert read_json(pleroma_off / "turned_off.json") == read_json( - pleroma_off / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_attribute_deleted( - authorized_client, pleroma_enable_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 200 - assert read_json(pleroma_enable_undefined / "enable_undefined.json") == read_json( - pleroma_enable_undefined / target_file - ) - - -@pytest.mark.parametrize( - "endpoint,target_file", - [("enable", "turned_on.json"), ("disable", "turned_off.json")], -) -def test_on_pleroma_undefined( - authorized_client, pleroma_undefined, endpoint, target_file -): - response = authorized_client.post(f"/services/pleroma/{endpoint}") - assert response.status_code == 200 - assert read_json(pleroma_undefined / 
"undefined.json") == read_json( - pleroma_undefined / target_file - ) diff --git a/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json b/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json deleted file mode 100644 index 0903875..0000000 --- a/tests/test_rest_endpoints/services/test_pleroma/enable_undefined.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": false - }, - "pleroma": { - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/turned_off.json b/tests/test_rest_endpoints/services/test_pleroma/turned_off.json deleted file mode 100644 index 813c01f..0000000 --- a/tests/test_rest_endpoints/services/test_pleroma/turned_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": false - }, - "pleroma": { - "enable": false - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/turned_on.json b/tests/test_rest_endpoints/services/test_pleroma/turned_on.json deleted file mode 100644 index 1c08123..0000000 --- a/tests/test_rest_endpoints/services/test_pleroma/turned_on.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": false - }, - 
"pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_pleroma/undefined.json b/tests/test_rest_endpoints/services/test_pleroma/undefined.json deleted file mode 100644 index 77d8ad2..0000000 --- a/tests/test_rest_endpoints/services/test_pleroma/undefined.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": false - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": false - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_restic/no_values.json b/tests/test_rest_endpoints/services/test_restic/no_values.json deleted file mode 100644 index 3b4a2f5..0000000 --- a/tests/test_rest_endpoints/services/test_restic/no_values.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] - }, - { - "username": "user2", - "hashedPassword": "HASHED_PASSWORD_2", - "sshKeys": [ - ] - }, - { - "username": "user3", - "hashedPassword": "HASHED_PASSWORD_3" - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_restic/some_values.json b/tests/test_rest_endpoints/services/test_restic/some_values.json deleted file mode 100644 index c003d10..0000000 --- a/tests/test_rest_endpoints/services/test_restic/some_values.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - 
"bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] - }, - { - "username": "user2", - "hashedPassword": "HASHED_PASSWORD_2", - "sshKeys": [ - ] - }, - { - "username": "user3", - "hashedPassword": "HASHED_PASSWORD_3" - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "BUCKET" - } -} diff --git a/tests/test_rest_endpoints/services/test_restic/undefined.json b/tests/test_rest_endpoints/services/test_restic/undefined.json deleted file mode 100644 index 5bd1220..0000000 --- a/tests/test_rest_endpoints/services/test_restic/undefined.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] - }, - { - "username": "user2", - "hashedPassword": "HASHED_PASSWORD_2", - "sshKeys": [ - ] - }, - { - "username": "user3", - "hashedPassword": "HASHED_PASSWORD_3" - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_services.py b/tests/test_rest_endpoints/services/test_services.py deleted file mode 100644 index 1108e8c..0000000 --- a/tests/test_rest_endpoints/services/test_services.py +++ /dev/null @@ -1,138 +0,0 @@ -import base64 -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r", encoding="utf-8") as file: - return json.load(file) - - -def call_args_asserts(mocked_object): - assert mocked_object.call_count == 7 - assert mocked_object.call_args_list[0][0][0] == [ - "systemctl", - "show", - "dovecot2.service", - ] - assert mocked_object.call_args_list[1][0][0] == [ - "systemctl", - "show", - "postfix.service", - ] - assert mocked_object.call_args_list[2][0][0] == [ - "systemctl", - "show", - "vaultwarden.service", - ] - assert mocked_object.call_args_list[3][0][0] == [ - "systemctl", - 
"show", - "gitea.service", - ] - assert mocked_object.call_args_list[4][0][0] == [ - "systemctl", - "show", - "phpfpm-nextcloud.service", - ] - assert mocked_object.call_args_list[5][0][0] == [ - "systemctl", - "show", - "ocserv.service", - ] - assert mocked_object.call_args_list[6][0][0] == [ - "systemctl", - "show", - "pleroma.service", - ] - - -SUCCESSFUL_STATUS = b""" -Type=oneshot -ExitType=main -Restart=no -NotifyAccess=none -RestartUSec=100ms -LoadState=loaded -ActiveState=active -FreezerState=running -SubState=exited -""" - -FAILED_STATUS = b""" -Type=oneshot -ExitType=main -Restart=no -NotifyAccess=none -RestartUSec=100ms -LoadState=loaded -ActiveState=failed -FreezerState=running -SubState=exited -""" - - -@pytest.fixture -def mock_subproccess_popen(mocker): - mock = mocker.patch( - "subprocess.check_output", autospec=True, return_value=SUCCESSFUL_STATUS - ) - return mock - - -@pytest.fixture -def mock_broken_service(mocker): - mock = mocker.patch( - "subprocess.check_output", autospec=True, return_value=FAILED_STATUS - ) - return mock - - -############################################################################### - - -def test_unauthorized(client, mock_subproccess_popen): - """Test unauthorized""" - response = client.get("/services/status") - assert response.status_code == 401 - - -def test_illegal_methods(authorized_client, mock_subproccess_popen): - response = authorized_client.post("/services/status") - assert response.status_code == 405 - response = authorized_client.put("/services/status") - assert response.status_code == 405 - response = authorized_client.delete("/services/status") - assert response.status_code == 405 - - -def test_dkim_key(authorized_client, mock_subproccess_popen): - response = authorized_client.get("/services/status") - assert response.status_code == 200 - assert response.json() == { - "imap": 0, - "smtp": 0, - "http": 0, - "bitwarden": 0, - "gitea": 0, - "nextcloud": 0, - "ocserv": 0, - "pleroma": 0, - } - call_args_asserts(mock_subproccess_popen) - - -def test_no_dkim_key(authorized_client, mock_broken_service): - response = authorized_client.get("/services/status") - assert response.status_code == 200 - assert response.json() == { - "imap": 1, - "smtp": 1, - "http": 0, - "bitwarden": 1, - "gitea": 1, - "nextcloud": 1, - "ocserv": 1, - "pleroma": 1, - } - call_args_asserts(mock_broken_service) diff --git a/tests/test_rest_endpoints/services/test_ssh.py b/tests/test_rest_endpoints/services/test_ssh.py deleted file mode 100644 index a17bdab..0000000 --- a/tests/test_rest_endpoints/services/test_ssh.py +++ /dev/null @@ -1,521 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r", encoding="utf-8") as file: - return json.load(file) - - -## FIXTURES ################################################### - - -@pytest.fixture -def ssh_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert not read_json(datadir / "turned_off.json")["ssh"]["enable"] - assert read_json(datadir / "turned_off.json")["ssh"]["passwordAuthentication"] - return datadir - - -@pytest.fixture -def ssh_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_off.json")["ssh"]["passwordAuthentication"] - assert read_json(datadir / "turned_on.json")["ssh"]["enable"] - return datadir - - -@pytest.fixture -def all_off(mocker, 
datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "all_off.json") - assert not read_json(datadir / "all_off.json")["ssh"]["passwordAuthentication"] - assert not read_json(datadir / "all_off.json")["ssh"]["enable"] - return datadir - - -@pytest.fixture -def undefined_settings(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "ssh" not in read_json(datadir / "undefined.json") - return datadir - - -@pytest.fixture -def undefined_values(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined_values.json" - ) - assert "ssh" in read_json(datadir / "undefined_values.json") - assert "enable" not in read_json(datadir / "undefined_values.json")["ssh"] - assert ( - "passwordAuthentication" - not in read_json(datadir / "undefined_values.json")["ssh"] - ) - return datadir - - -@pytest.fixture -def root_and_admin_have_keys(mocker, datadir): - mocker.patch( - "selfprivacy_api.utils.USERDATA_FILE", - new=datadir / "root_and_admin_have_keys.json", - ) - assert read_json(datadir / "root_and_admin_have_keys.json")["ssh"]["enable"] - assert read_json(datadir / "root_and_admin_have_keys.json")["ssh"][ - "passwordAuthentication" - ] - assert read_json(datadir / "root_and_admin_have_keys.json")["ssh"]["rootKeys"] == [ - "ssh-ed25519 KEY test@pc" - ] - assert read_json(datadir / "root_and_admin_have_keys.json")["sshKeys"] == [ - "ssh-rsa KEY test@pc" - ] - return datadir - - -@pytest.fixture -def some_users(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "some_users.json") - assert "users" in read_json(datadir / "some_users.json") - assert read_json(datadir / "some_users.json")["users"] == [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": ["ssh-rsa KEY user1@pc"], - }, - {"username": "user2", "hashedPassword": "HASHED_PASSWORD_2", "sshKeys": []}, - {"username": "user3", "hashedPassword": "HASHED_PASSWORD_3"}, - ] - return datadir - - -## TEST 401 ###################################################### - - -@pytest.mark.parametrize("endpoint", ["ssh/enable", "ssh/keys/user"]) -def test_unauthorized(client, ssh_off, endpoint): - response = client.post(f"/services/{endpoint}") - assert response.status_code == 401 - - -@pytest.mark.parametrize("endpoint", ["ssh", "ssh/key/send"]) -def test_unauthorized_put(client, ssh_off, endpoint): - response = client.put(f"/services/{endpoint}") - assert response.status_code == 401 - - -## TEST ENABLE ###################################################### - - -def test_legacy_enable(authorized_client, ssh_off): - response = authorized_client.post("/services/ssh/enable") - assert response.status_code == 200 - assert read_json(ssh_off / "turned_off.json") == read_json( - ssh_off / "turned_on.json" - ) - - -def test_legacy_on_undefined(authorized_client, undefined_settings): - response = authorized_client.post("/services/ssh/enable") - assert response.status_code == 200 - data = read_json(undefined_settings / "undefined.json") - assert data["ssh"]["enable"] == True - - -def test_legacy_enable_when_enabled(authorized_client, ssh_on): - response = authorized_client.post("/services/ssh/enable") - assert response.status_code == 200 - assert read_json(ssh_on / "turned_on.json") == read_json(ssh_on / "turned_on.json") - - -## GET ON /ssh ###################################################### - - -def test_get_current_settings_ssh_off(authorized_client, ssh_off): - response = 
authorized_client.get("/services/ssh") - assert response.status_code == 200 - assert response.json() == {"enable": False, "passwordAuthentication": True} - - -def test_get_current_settings_ssh_on(authorized_client, ssh_on): - response = authorized_client.get("/services/ssh") - assert response.status_code == 200 - assert response.json() == {"enable": True, "passwordAuthentication": True} - - -def test_get_current_settings_all_off(authorized_client, all_off): - response = authorized_client.get("/services/ssh") - assert response.status_code == 200 - assert response.json() == {"enable": False, "passwordAuthentication": False} - - -def test_get_current_settings_undefined(authorized_client, undefined_settings): - response = authorized_client.get("/services/ssh") - assert response.status_code == 200 - assert response.json() == {"enable": True, "passwordAuthentication": True} - - -def test_get_current_settings_mostly_undefined(authorized_client, undefined_values): - response = authorized_client.get("/services/ssh") - assert response.status_code == 200 - assert response.json() == {"enable": True, "passwordAuthentication": True} - - -## PUT ON /ssh ###################################################### - -available_settings = [ - {"enable": True, "passwordAuthentication": True}, - {"enable": True, "passwordAuthentication": False}, - {"enable": False, "passwordAuthentication": True}, - {"enable": False, "passwordAuthentication": False}, - {"enable": True}, - {"enable": False}, - {"passwordAuthentication": True}, - {"passwordAuthentication": False}, -] - - -@pytest.mark.parametrize("settings", available_settings) -def test_set_settings_ssh_off(authorized_client, ssh_off, settings): - response = authorized_client.put("/services/ssh", json=settings) - assert response.status_code == 200 - data = read_json(ssh_off / "turned_off.json")["ssh"] - if "enable" in settings: - assert data["enable"] == settings["enable"] - if "passwordAuthentication" in settings: - assert data["passwordAuthentication"] == settings["passwordAuthentication"] - - -@pytest.mark.parametrize("settings", available_settings) -def test_set_settings_ssh_on(authorized_client, ssh_on, settings): - response = authorized_client.put("/services/ssh", json=settings) - assert response.status_code == 200 - data = read_json(ssh_on / "turned_on.json")["ssh"] - if "enable" in settings: - assert data["enable"] == settings["enable"] - if "passwordAuthentication" in settings: - assert data["passwordAuthentication"] == settings["passwordAuthentication"] - - -@pytest.mark.parametrize("settings", available_settings) -def test_set_settings_all_off(authorized_client, all_off, settings): - response = authorized_client.put("/services/ssh", json=settings) - assert response.status_code == 200 - data = read_json(all_off / "all_off.json")["ssh"] - if "enable" in settings: - assert data["enable"] == settings["enable"] - if "passwordAuthentication" in settings: - assert data["passwordAuthentication"] == settings["passwordAuthentication"] - - -@pytest.mark.parametrize("settings", available_settings) -def test_set_settings_undefined(authorized_client, undefined_settings, settings): - response = authorized_client.put("/services/ssh", json=settings) - assert response.status_code == 200 - data = read_json(undefined_settings / "undefined.json")["ssh"] - if "enable" in settings: - assert data["enable"] == settings["enable"] - if "passwordAuthentication" in settings: - assert data["passwordAuthentication"] == settings["passwordAuthentication"] - - -## PUT ON /ssh/key/send 
###################################################### - - -def test_add_root_key(authorized_client, ssh_on): - response = authorized_client.put( - "/services/ssh/key/send", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 201 - assert read_json(ssh_on / "turned_on.json")["ssh"]["rootKeys"] == [ - "ssh-rsa KEY test@pc", - ] - - -def test_add_root_key_on_undefined(authorized_client, undefined_settings): - response = authorized_client.put( - "/services/ssh/key/send", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 201 - data = read_json(undefined_settings / "undefined.json") - assert data["ssh"]["rootKeys"] == ["ssh-rsa KEY test@pc"] - - -def test_add_root_key_one_more(authorized_client, root_and_admin_have_keys): - response = authorized_client.put( - "/services/ssh/key/send", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 201 - assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][ - "rootKeys" - ] == [ - "ssh-ed25519 KEY test@pc", - "ssh-rsa KEY test@pc", - ] - - -def test_add_existing_root_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.put( - "/services/ssh/key/send", json={"public_key": "ssh-ed25519 KEY test@pc"} - ) - assert response.status_code == 409 - assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][ - "rootKeys" - ] == [ - "ssh-ed25519 KEY test@pc", - ] - - -def test_add_invalid_root_key(authorized_client, ssh_on): - response = authorized_client.put( - "/services/ssh/key/send", json={"public_key": "INVALID KEY test@pc"} - ) - assert response.status_code == 400 - - -## /ssh/keys/{user} ###################################################### - - -def test_get_root_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.get("/services/ssh/keys/root") - assert response.status_code == 200 - assert response.json() == ["ssh-ed25519 KEY test@pc"] - - -def test_get_root_key_when_none(authorized_client, ssh_on): - response = authorized_client.get("/services/ssh/keys/root") - assert response.status_code == 200 - assert response.json() == [] - - -def test_get_root_key_on_undefined(authorized_client, undefined_settings): - response = authorized_client.get("/services/ssh/keys/root") - assert response.status_code == 200 - assert response.json() == [] - - -def test_delete_root_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.delete( - "/services/ssh/keys/root", json={"public_key": "ssh-ed25519 KEY test@pc"} - ) - assert response.status_code == 200 - assert ( - "rootKeys" - not in read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[ - "ssh" - ] - or read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][ - "rootKeys" - ] - == [] - ) - - -def test_delete_root_nonexistent_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.delete( - "/services/ssh/keys/root", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 404 - assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["ssh"][ - "rootKeys" - ] == [ - "ssh-ed25519 KEY test@pc", - ] - - -def test_delete_root_key_on_undefined(authorized_client, undefined_settings): - response = authorized_client.delete( - "/services/ssh/keys/root", json={"public_key": "ssh-ed25519 KEY test@pc"} - ) - assert response.status_code == 404 - assert "ssh" not in read_json(undefined_settings / "undefined.json") - 
- -def test_get_admin_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.get("/services/ssh/keys/tester") - assert response.status_code == 200 - assert response.json() == ["ssh-rsa KEY test@pc"] - - -def test_get_admin_key_when_none(authorized_client, ssh_on): - response = authorized_client.get("/services/ssh/keys/tester") - assert response.status_code == 200 - assert response.json() == [] - - -def test_delete_admin_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.delete( - "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 200 - assert ( - read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")["sshKeys"] - == [] - ) - - -def test_delete_nonexistent_admin_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.delete( - "/services/ssh/keys/tester", json={"public_key": "ssh-rsa NO KEY test@pc"} - ) - assert response.status_code == 404 - assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[ - "sshKeys" - ] == ["ssh-rsa KEY test@pc"] - - -def test_delete_admin_key_on_undefined(authorized_client, undefined_settings): - response = authorized_client.delete( - "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 404 - assert "sshKeys" not in read_json(undefined_settings / "undefined.json") - - -def test_add_admin_key(authorized_client, ssh_on): - response = authorized_client.post( - "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 201 - assert read_json(ssh_on / "turned_on.json")["sshKeys"] == [ - "ssh-rsa KEY test@pc", - ] - - -def test_add_admin_key_one_more(authorized_client, root_and_admin_have_keys): - response = authorized_client.post( - "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY_2 test@pc"} - ) - assert response.status_code == 201 - assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[ - "sshKeys" - ] == ["ssh-rsa KEY test@pc", "ssh-rsa KEY_2 test@pc"] - - -def test_add_existing_admin_key(authorized_client, root_and_admin_have_keys): - response = authorized_client.post( - "/services/ssh/keys/tester", json={"public_key": "ssh-rsa KEY test@pc"} - ) - assert response.status_code == 409 - assert read_json(root_and_admin_have_keys / "root_and_admin_have_keys.json")[ - "sshKeys" - ] == [ - "ssh-rsa KEY test@pc", - ] - - -def test_add_invalid_admin_key(authorized_client, ssh_on): - response = authorized_client.post( - "/services/ssh/keys/tester", json={"public_key": "INVALID KEY test@pc"} - ) - assert response.status_code == 400 - - -@pytest.mark.parametrize("user", [1, 2, 3]) -def test_get_user_key(authorized_client, some_users, user): - response = authorized_client.get(f"/services/ssh/keys/user{user}") - assert response.status_code == 200 - if user == 1: - assert response.json() == ["ssh-rsa KEY user1@pc"] - else: - assert response.json() == [] - - -def test_get_keys_of_nonexistent_user(authorized_client, some_users): - response = authorized_client.get("/services/ssh/keys/user4") - assert response.status_code == 404 - - -def test_get_keys_of_undefined_users(authorized_client, undefined_settings): - response = authorized_client.get("/services/ssh/keys/user1") - assert response.status_code == 404 - - -@pytest.mark.parametrize("user", [1, 2, 3]) -def test_add_user_key(authorized_client, some_users, user): - response = authorized_client.post( - 
f"/services/ssh/keys/user{user}", json={"public_key": "ssh-ed25519 KEY test@pc"} - ) - assert response.status_code == 201 - if user == 1: - assert read_json(some_users / "some_users.json")["users"][user - 1][ - "sshKeys" - ] == [ - "ssh-rsa KEY user1@pc", - "ssh-ed25519 KEY test@pc", - ] - else: - assert read_json(some_users / "some_users.json")["users"][user - 1][ - "sshKeys" - ] == ["ssh-ed25519 KEY test@pc"] - - -def test_add_existing_user_key(authorized_client, some_users): - response = authorized_client.post( - "/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user1@pc"} - ) - assert response.status_code == 409 - assert read_json(some_users / "some_users.json")["users"][0]["sshKeys"] == [ - "ssh-rsa KEY user1@pc", - ] - - -def test_add_invalid_user_key(authorized_client, some_users): - response = authorized_client.post( - "/services/ssh/keys/user1", json={"public_key": "INVALID KEY user1@pc"} - ) - assert response.status_code == 400 - - -def test_delete_user_key(authorized_client, some_users): - response = authorized_client.delete( - "/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user1@pc"} - ) - assert response.status_code == 200 - assert read_json(some_users / "some_users.json")["users"][0]["sshKeys"] == [] - - -@pytest.mark.parametrize("user", [2, 3]) -def test_delete_nonexistent_user_key(authorized_client, some_users, user): - response = authorized_client.delete( - f"/services/ssh/keys/user{user}", json={"public_key": "ssh-rsa KEY user1@pc"} - ) - assert response.status_code == 404 - if user == 2: - assert ( - read_json(some_users / "some_users.json")["users"][user - 1]["sshKeys"] - == [] - ) - if user == 3: - "sshKeys" not in read_json(some_users / "some_users.json")["users"][user - 1] - - -def test_add_keys_of_nonexistent_user(authorized_client, some_users): - response = authorized_client.post( - "/services/ssh/keys/user4", json={"public_key": "ssh-rsa KEY user4@pc"} - ) - assert response.status_code == 404 - - -def test_add_key_on_undefined_users(authorized_client, undefined_settings): - response = authorized_client.post( - "/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user4@pc"} - ) - assert response.status_code == 404 - - -def test_delete_keys_of_nonexistent_user(authorized_client, some_users): - response = authorized_client.delete( - "/services/ssh/keys/user4", json={"public_key": "ssh-rsa KEY user4@pc"} - ) - assert response.status_code == 404 - - -def test_delete_key_when_undefined_users(authorized_client, undefined_settings): - response = authorized_client.delete( - "/services/ssh/keys/user1", json={"public_key": "ssh-rsa KEY user1@pc"} - ) - assert response.status_code == 404 diff --git a/tests/test_rest_endpoints/services/test_ssh/all_off.json b/tests/test_rest_endpoints/services/test_ssh/all_off.json deleted file mode 100644 index 051d364..0000000 --- a/tests/test_rest_endpoints/services/test_ssh/all_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": false, - "passwordAuthentication": false, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - 
"enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json b/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json deleted file mode 100644 index c1691ea..0000000 --- a/tests/test_rest_endpoints/services/test_ssh/root_and_admin_have_keys.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/some_users.json b/tests/test_rest_endpoints/services/test_ssh/some_users.json deleted file mode 100644 index df6380a..0000000 --- a/tests/test_rest_endpoints/services/test_ssh/some_users.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] - }, - { - "username": "user2", - "hashedPassword": "HASHED_PASSWORD_2", - "sshKeys": [ - ] - }, - { - "username": "user3", - "hashedPassword": "HASHED_PASSWORD_3" - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/turned_off.json b/tests/test_rest_endpoints/services/test_ssh/turned_off.json deleted file mode 100644 index 3856c80..0000000 --- 
a/tests/test_rest_endpoints/services/test_ssh/turned_off.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": false, - "passwordAuthentication": true - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/turned_on.json b/tests/test_rest_endpoints/services/test_ssh/turned_on.json deleted file mode 100644 index e60c57f..0000000 --- a/tests/test_rest_endpoints/services/test_ssh/turned_on.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/undefined.json b/tests/test_rest_endpoints/services/test_ssh/undefined.json deleted file mode 100644 index 7c9af37..0000000 --- a/tests/test_rest_endpoints/services/test_ssh/undefined.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/services/test_ssh/undefined_values.json b/tests/test_rest_endpoints/services/test_ssh/undefined_values.json deleted file mode 100644 index b7b03d3..0000000 --- 
a/tests/test_rest_endpoints/services/test_ssh/undefined_values.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": {}, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_auth.py b/tests/test_rest_endpoints/test_auth.py deleted file mode 100644 index 12de0cf..0000000 --- a/tests/test_rest_endpoints/test_auth.py +++ /dev/null @@ -1,535 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=missing-function-docstring -import datetime -import pytest -from mnemonic import Mnemonic - -from selfprivacy_api.repositories.tokens.json_tokens_repository import ( - JsonTokensRepository, -) - -TOKEN_REPO = JsonTokensRepository() - -from tests.common import read_json, write_json - - -TOKENS_FILE_CONTETS = { - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314", - }, - { - "token": "TEST_TOKEN2", - "name": "test_token2", - "date": "2022-01-14 08:31:10.789314", - }, - ] -} - -DATE_FORMATS = [ - "%Y-%m-%dT%H:%M:%S.%fZ", - "%Y-%m-%dT%H:%M:%S.%f", - "%Y-%m-%d %H:%M:%S.%fZ", - "%Y-%m-%d %H:%M:%S.%f", -] - - -def test_get_tokens_info(authorized_client, tokens_file): - response = authorized_client.get("/auth/tokens") - assert response.status_code == 200 - assert response.json() == [ - {"name": "test_token", "date": "2022-01-14T08:31:10.789314", "is_caller": True}, - { - "name": "test_token2", - "date": "2022-01-14T08:31:10.789314", - "is_caller": False, - }, - ] - - -def test_get_tokens_unauthorized(client, tokens_file): - response = client.get("/auth/tokens") - assert response.status_code == 401 - - -def test_delete_token_unauthorized(client, tokens_file): - response = client.delete("/auth/tokens") - assert response.status_code == 401 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS - - -def test_delete_token(authorized_client, tokens_file): - response = authorized_client.delete( - "/auth/tokens", json={"token_name": "test_token2"} - ) - assert response.status_code == 200 - assert read_json(tokens_file) == { - "tokens": [ - { - "token": "TEST_TOKEN", - "name": "test_token", - "date": "2022-01-14 08:31:10.789314", - } - ] - } - - -def test_delete_self_token(authorized_client, tokens_file): - response = authorized_client.delete( - "/auth/tokens", json={"token_name": "test_token"} - ) - assert response.status_code == 400 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS - - -def test_delete_nonexistent_token(authorized_client, tokens_file): - response = authorized_client.delete( - "/auth/tokens", json={"token_name": "test_token3"} - ) - assert response.status_code == 404 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS - - -def 
test_refresh_token_unauthorized(client, tokens_file): - response = client.post("/auth/tokens") - assert response.status_code == 401 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS - - -def test_refresh_token(authorized_client, tokens_file): - response = authorized_client.post("/auth/tokens") - assert response.status_code == 200 - new_token = response.json()["token"] - assert TOKEN_REPO.get_token_by_token_string(new_token) is not None - - -# new device - - -def test_get_new_device_auth_token_unauthorized(client, tokens_file): - response = client.post("/auth/new_device") - assert response.status_code == 401 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS - - -def test_get_new_device_auth_token(authorized_client, tokens_file): - response = authorized_client.post("/auth/new_device") - assert response.status_code == 200 - assert "token" in response.json() - token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() - assert read_json(tokens_file)["new_device"]["token"] == token - - -def test_get_and_delete_new_device_token(authorized_client, tokens_file): - response = authorized_client.post("/auth/new_device") - assert response.status_code == 200 - assert "token" in response.json() - token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() - assert read_json(tokens_file)["new_device"]["token"] == token - response = authorized_client.delete( - "/auth/new_device", json={"token": response.json()["token"]} - ) - assert response.status_code == 200 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS - - -def test_delete_token_unauthenticated(client, tokens_file): - response = client.delete("/auth/new_device") - assert response.status_code == 401 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS - - -def test_get_and_authorize_new_device(client, authorized_client, tokens_file): - response = authorized_client.post("/auth/new_device") - assert response.status_code == 200 - assert "token" in response.json() - token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() - assert read_json(tokens_file)["new_device"]["token"] == token - response = client.post( - "/auth/new_device/authorize", - json={"token": response.json()["token"], "device": "new_device"}, - ) - assert response.status_code == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] - assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" - - -def test_authorize_new_device_with_invalid_token(client, tokens_file): - response = client.post( - "/auth/new_device/authorize", - json={"token": "invalid_token", "device": "new_device"}, - ) - assert response.status_code == 404 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS - - -def test_get_and_authorize_used_token(client, authorized_client, tokens_file): - response = authorized_client.post("/auth/new_device") - assert response.status_code == 200 - assert "token" in response.json() - token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() - assert read_json(tokens_file)["new_device"]["token"] == token - response = client.post( - "/auth/new_device/authorize", - json={"token": response.json()["token"], "device": "new_device"}, - ) - assert response.status_code == 200 - assert read_json(tokens_file)["tokens"][2]["token"] == response.json()["token"] - assert read_json(tokens_file)["tokens"][2]["name"] == "new_device" - response = client.post( - "/auth/new_device/authorize", - json={"token": response.json()["token"], "device": "new_device"}, - ) - 
assert response.status_code == 404 - - -def test_get_and_authorize_token_after_12_minutes( - client, authorized_client, tokens_file -): - response = authorized_client.post("/auth/new_device") - assert response.status_code == 200 - assert "token" in response.json() - token = Mnemonic(language="english").to_entropy(response.json()["token"]).hex() - assert read_json(tokens_file)["new_device"]["token"] == token - - file_data = read_json(tokens_file) - file_data["new_device"]["expiration"] = str( - datetime.datetime.now() - datetime.timedelta(minutes=13) - ) - write_json(tokens_file, file_data) - - response = client.post( - "/auth/new_device/authorize", - json={"token": response.json()["token"], "device": "new_device"}, - ) - assert response.status_code == 404 - - -def test_authorize_without_token(client, tokens_file): - response = client.post( - "/auth/new_device/authorize", - json={"device": "new_device"}, - ) - assert response.status_code == 422 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS - - -# Recovery tokens -# GET /auth/recovery_token returns token status -# - if token is valid, returns 200 and token status -# - token status: -# - exists (boolean) -# - valid (boolean) -# - date (string) -# - expiration (string) -# - uses_left (int) -# - if token is invalid, returns 400 and empty body -# POST /auth/recovery_token generates a new token -# has two optional parameters: -# - expiration (string in datetime format) -# - uses_left (int) -# POST /auth/recovery_token/use uses the token -# required arguments: -# - token (string) -# - device (string) -# - if token is valid, returns 200 and token -# - if token is invalid, returns 404 -# - if request is invalid, returns 400 - - -def test_get_recovery_token_status_unauthorized(client, tokens_file): - response = client.get("/auth/recovery_token") - assert response.status_code == 401 - assert read_json(tokens_file) == TOKENS_FILE_CONTETS - - -def test_get_recovery_token_when_none_exists(authorized_client, tokens_file): - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 - assert response.json() == { - "exists": False, - "valid": False, - "date": None, - "expiration": None, - "uses_left": None, - } - assert read_json(tokens_file) == TOKENS_FILE_CONTETS - - -def test_generate_recovery_token(authorized_client, client, tokens_file): - # Generate token without expiration and uses_left - response = authorized_client.post("/auth/recovery_token") - assert response.status_code == 200 - assert "token" in response.json() - mnemonic_token = response.json()["token"] - token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() - assert read_json(tokens_file)["recovery_token"]["token"] == token - - time_generated = read_json(tokens_file)["recovery_token"]["date"] - assert time_generated is not None - # Assert that the token was generated near the current time - assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - - datetime.timedelta(seconds=5) - < datetime.datetime.now() - ) - - # Try to get token status - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 - assert response.json() == { - "exists": True, - "valid": True, - "date": time_generated, - "expiration": None, - "uses_left": None, - } - - # Try to use the token - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device"}, - ) - assert recovery_response.status_code == 200 - new_token = 
recovery_response.json()["token"] - assert read_json(tokens_file)["tokens"][2]["token"] == new_token - assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" - - # Try to use token again - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device2"}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert read_json(tokens_file)["tokens"][3]["token"] == new_token - assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" - - -@pytest.mark.parametrize("timeformat", DATE_FORMATS) -def test_generate_recovery_token_with_expiration_date( - authorized_client, client, tokens_file, timeformat -): - # Generate token with expiration date - # Generate expiration date in the future - expiration_date = datetime.datetime.now() + datetime.timedelta(minutes=5) - expiration_date_str = expiration_date.strftime(timeformat) - response = authorized_client.post( - "/auth/recovery_token", - json={"expiration": expiration_date_str}, - ) - assert response.status_code == 200 - assert "token" in response.json() - mnemonic_token = response.json()["token"] - token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() - assert read_json(tokens_file)["recovery_token"]["token"] == token - assert datetime.datetime.strptime( - read_json(tokens_file)["recovery_token"]["expiration"], "%Y-%m-%dT%H:%M:%S.%f" - ) == datetime.datetime.strptime(expiration_date_str, timeformat) - - time_generated = read_json(tokens_file)["recovery_token"]["date"] - assert time_generated is not None - # Assert that the token was generated near the current time - assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - - datetime.timedelta(seconds=5) - < datetime.datetime.now() - ) - - # Try to get token status - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 - assert response.json() == { - "exists": True, - "valid": True, - "date": time_generated, - "expiration": expiration_date.strftime("%Y-%m-%dT%H:%M:%S.%f"), - "uses_left": None, - } - - # Try to use the token - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device"}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert read_json(tokens_file)["tokens"][2]["token"] == new_token - assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" - - # Try to use token again - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device2"}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert read_json(tokens_file)["tokens"][3]["token"] == new_token - assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" - - # Try to use token after expiration date - new_data = read_json(tokens_file) - new_data["recovery_token"]["expiration"] = datetime.datetime.now().strftime( - "%Y-%m-%dT%H:%M:%S.%f" - ) - write_json(tokens_file, new_data) - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device3"}, - ) - assert recovery_response.status_code == 404 - # Assert that the token was not created in JSON - assert read_json(tokens_file)["tokens"] == new_data["tokens"] - - # Get the status of the token - response = authorized_client.get("/auth/recovery_token") - assert 
response.status_code == 200 - assert response.json() == { - "exists": True, - "valid": False, - "date": time_generated, - "expiration": new_data["recovery_token"]["expiration"], - "uses_left": None, - } - - -@pytest.mark.parametrize("timeformat", DATE_FORMATS) -def test_generate_recovery_token_with_expiration_in_the_past( - authorized_client, tokens_file, timeformat -): - # Server must return 400 if expiration date is in the past - expiration_date = datetime.datetime.utcnow() - datetime.timedelta(minutes=5) - expiration_date_str = expiration_date.strftime(timeformat) - response = authorized_client.post( - "/auth/recovery_token", - json={"expiration": expiration_date_str}, - ) - assert response.status_code == 400 - assert "recovery_token" not in read_json(tokens_file) - - -def test_generate_recovery_token_with_invalid_time_format( - authorized_client, tokens_file -): - # Server must return 400 if expiration date is in the past - expiration_date = "invalid_time_format" - response = authorized_client.post( - "/auth/recovery_token", - json={"expiration": expiration_date}, - ) - assert response.status_code == 422 - assert "recovery_token" not in read_json(tokens_file) - - -def test_generate_recovery_token_with_limited_uses( - authorized_client, client, tokens_file -): - # Generate token with limited uses - response = authorized_client.post( - "/auth/recovery_token", - json={"uses": 2}, - ) - assert response.status_code == 200 - assert "token" in response.json() - mnemonic_token = response.json()["token"] - token = Mnemonic(language="english").to_entropy(mnemonic_token).hex() - assert read_json(tokens_file)["recovery_token"]["token"] == token - assert read_json(tokens_file)["recovery_token"]["uses_left"] == 2 - - # Get the date of the token - time_generated = read_json(tokens_file)["recovery_token"]["date"] - assert time_generated is not None - assert ( - datetime.datetime.strptime(time_generated, "%Y-%m-%dT%H:%M:%S.%f") - - datetime.timedelta(seconds=5) - < datetime.datetime.now() - ) - - # Try to get token status - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 - assert response.json() == { - "exists": True, - "valid": True, - "date": time_generated, - "expiration": None, - "uses_left": 2, - } - - # Try to use the token - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device"}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert read_json(tokens_file)["tokens"][2]["token"] == new_token - assert read_json(tokens_file)["tokens"][2]["name"] == "recovery_device" - - assert read_json(tokens_file)["recovery_token"]["uses_left"] == 1 - - # Get the status of the token - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 - assert response.json() == { - "exists": True, - "valid": True, - "date": time_generated, - "expiration": None, - "uses_left": 1, - } - - # Try to use token again - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device2"}, - ) - assert recovery_response.status_code == 200 - new_token = recovery_response.json()["token"] - assert read_json(tokens_file)["tokens"][3]["token"] == new_token - assert read_json(tokens_file)["tokens"][3]["name"] == "recovery_device2" - - # Get the status of the token - response = authorized_client.get("/auth/recovery_token") - assert response.status_code == 200 - assert 
response.json() == { - "exists": True, - "valid": False, - "date": time_generated, - "expiration": None, - "uses_left": 0, - } - - # Try to use token after limited uses - recovery_response = client.post( - "/auth/recovery_token/use", - json={"token": mnemonic_token, "device": "recovery_device3"}, - ) - assert recovery_response.status_code == 404 - - assert read_json(tokens_file)["recovery_token"]["uses_left"] == 0 - - -def test_generate_recovery_token_with_negative_uses( - authorized_client, client, tokens_file -): - # Generate token with limited uses - response = authorized_client.post( - "/auth/recovery_token", - json={"uses": -2}, - ) - assert response.status_code == 400 - assert "recovery_token" not in read_json(tokens_file) - - -def test_generate_recovery_token_with_zero_uses(authorized_client, client, tokens_file): - # Generate token with limited uses - response = authorized_client.post( - "/auth/recovery_token", - json={"uses": 0}, - ) - assert response.status_code == 400 - assert "recovery_token" not in read_json(tokens_file) diff --git a/tests/test_rest_endpoints/test_system.py b/tests/test_rest_endpoints/test_system.py deleted file mode 100644 index 90c1499..0000000 --- a/tests/test_rest_endpoints/test_system.py +++ /dev/null @@ -1,416 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=missing-function-docstring - -import json -import os -import pytest -from selfprivacy_api.utils import get_domain - - -def read_json(file_path): - with open(file_path, "r", encoding="utf-8") as file: - return json.load(file) - - -@pytest.fixture -def domain_file(mocker, datadir): - mocker.patch("selfprivacy_api.utils.DOMAIN_FILE", datadir / "domain") - return datadir - - -@pytest.fixture -def turned_on(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_on.json") - assert read_json(datadir / "turned_on.json")["autoUpgrade"]["enable"] == True - assert read_json(datadir / "turned_on.json")["autoUpgrade"]["allowReboot"] == True - assert read_json(datadir / "turned_on.json")["timezone"] == "Europe/Moscow" - return datadir - - -@pytest.fixture -def turned_off(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "turned_off.json") - assert read_json(datadir / "turned_off.json")["autoUpgrade"]["enable"] == False - assert read_json(datadir / "turned_off.json")["autoUpgrade"]["allowReboot"] == False - assert read_json(datadir / "turned_off.json")["timezone"] == "Europe/Moscow" - return datadir - - -@pytest.fixture -def undefined_config(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "autoUpgrade" not in read_json(datadir / "undefined.json") - assert "timezone" not in read_json(datadir / "undefined.json") - return datadir - - -@pytest.fixture -def no_values(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_values.json") - assert "enable" not in read_json(datadir / "no_values.json")["autoUpgrade"] - assert "allowReboot" not in read_json(datadir / "no_values.json")["autoUpgrade"] - return datadir - - -class ProcessMock: - """Mock subprocess.Popen""" - - def __init__(self, args, **kwargs): - self.args = args - self.kwargs = kwargs - - def communicate(): - return (b"", None) - - returncode = 0 - - -class BrokenServiceMock(ProcessMock): - """Mock subprocess.Popen""" - - def communicate(): - return (b"Testing error", None) - - returncode = 3 - - -@pytest.fixture -def 
mock_subprocess_popen(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) - return mock - - -@pytest.fixture -def mock_os_chdir(mocker): - mock = mocker.patch("os.chdir", autospec=True) - return mock - - -@pytest.fixture -def mock_broken_service(mocker): - mock = mocker.patch( - "subprocess.Popen", autospec=True, return_value=BrokenServiceMock - ) - return mock - - -@pytest.fixture -def mock_subprocess_check_output(mocker): - mock = mocker.patch( - "subprocess.check_output", autospec=True, return_value=b"Testing Linux" - ) - return mock - - -def test_wrong_auth(wrong_auth_client): - response = wrong_auth_client.get("/system/pythonVersion") - assert response.status_code == 401 - - -def test_get_domain(authorized_client, domain_file): - assert get_domain() == "test-domain.tld" - - -## Timezones - - -def test_get_timezone_unauthorized(client, turned_on): - response = client.get("/system/configuration/timezone") - assert response.status_code == 401 - - -def test_get_timezone(authorized_client, turned_on): - response = authorized_client.get("/system/configuration/timezone") - assert response.status_code == 200 - assert response.json() == "Europe/Moscow" - - -def test_get_timezone_on_undefined(authorized_client, undefined_config): - response = authorized_client.get("/system/configuration/timezone") - assert response.status_code == 200 - assert response.json() == "Europe/Uzhgorod" - - -def test_put_timezone_unauthorized(client, turned_on): - response = client.put( - "/system/configuration/timezone", json={"timezone": "Europe/Moscow"} - ) - assert response.status_code == 401 - - -def test_put_timezone(authorized_client, turned_on): - response = authorized_client.put( - "/system/configuration/timezone", json={"timezone": "Europe/Helsinki"} - ) - assert response.status_code == 200 - assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Helsinki" - - -def test_put_timezone_on_undefined(authorized_client, undefined_config): - response = authorized_client.put( - "/system/configuration/timezone", json={"timezone": "Europe/Helsinki"} - ) - assert response.status_code == 200 - assert ( - read_json(undefined_config / "undefined.json")["timezone"] == "Europe/Helsinki" - ) - - -def test_put_timezone_without_timezone(authorized_client, turned_on): - response = authorized_client.put("/system/configuration/timezone", json={}) - assert response.status_code == 422 - assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" - - -def test_put_invalid_timezone(authorized_client, turned_on): - response = authorized_client.put( - "/system/configuration/timezone", json={"timezone": "Invalid/Timezone"} - ) - assert response.status_code == 400 - assert read_json(turned_on / "turned_on.json")["timezone"] == "Europe/Moscow" - - -## AutoUpgrade - - -def test_get_auto_upgrade_unauthorized(client, turned_on): - response = client.get("/system/configuration/autoUpgrade") - assert response.status_code == 401 - - -def test_get_auto_upgrade(authorized_client, turned_on): - response = authorized_client.get("/system/configuration/autoUpgrade") - assert response.status_code == 200 - assert response.json() == { - "enable": True, - "allowReboot": True, - } - - -def test_get_auto_upgrade_on_undefined(authorized_client, undefined_config): - response = authorized_client.get("/system/configuration/autoUpgrade") - assert response.status_code == 200 - assert response.json() == { - "enable": True, - "allowReboot": False, - } - - -def 
test_get_auto_upgrade_without_values(authorized_client, no_values): - response = authorized_client.get("/system/configuration/autoUpgrade") - assert response.status_code == 200 - assert response.json() == { - "enable": True, - "allowReboot": False, - } - - -def test_get_auto_upgrade_turned_off(authorized_client, turned_off): - response = authorized_client.get("/system/configuration/autoUpgrade") - assert response.status_code == 200 - assert response.json() == { - "enable": False, - "allowReboot": False, - } - - -def test_put_auto_upgrade_unauthorized(client, turned_on): - response = client.put( - "/system/configuration/autoUpgrade", json={"enable": True, "allowReboot": True} - ) - assert response.status_code == 401 - - -def test_put_auto_upgrade(authorized_client, turned_on): - response = authorized_client.put( - "/system/configuration/autoUpgrade", json={"enable": False, "allowReboot": True} - ) - assert response.status_code == 200 - assert read_json(turned_on / "turned_on.json")["autoUpgrade"] == { - "enable": False, - "allowReboot": True, - } - - -def test_put_auto_upgrade_on_undefined(authorized_client, undefined_config): - response = authorized_client.put( - "/system/configuration/autoUpgrade", json={"enable": False, "allowReboot": True} - ) - assert response.status_code == 200 - assert read_json(undefined_config / "undefined.json")["autoUpgrade"] == { - "enable": False, - "allowReboot": True, - } - - -def test_put_auto_upgrade_without_values(authorized_client, no_values): - response = authorized_client.put( - "/system/configuration/autoUpgrade", json={"enable": True, "allowReboot": True} - ) - assert response.status_code == 200 - assert read_json(no_values / "no_values.json")["autoUpgrade"] == { - "enable": True, - "allowReboot": True, - } - - -def test_put_auto_upgrade_turned_off(authorized_client, turned_off): - response = authorized_client.put( - "/system/configuration/autoUpgrade", json={"enable": True, "allowReboot": True} - ) - assert response.status_code == 200 - assert read_json(turned_off / "turned_off.json")["autoUpgrade"] == { - "enable": True, - "allowReboot": True, - } - - -def test_put_auto_upgrade_without_enable(authorized_client, turned_off): - response = authorized_client.put( - "/system/configuration/autoUpgrade", json={"allowReboot": True} - ) - assert response.status_code == 200 - assert read_json(turned_off / "turned_off.json")["autoUpgrade"] == { - "enable": False, - "allowReboot": True, - } - - -def test_put_auto_upgrade_without_allow_reboot(authorized_client, turned_off): - response = authorized_client.put( - "/system/configuration/autoUpgrade", json={"enable": True} - ) - assert response.status_code == 200 - assert read_json(turned_off / "turned_off.json")["autoUpgrade"] == { - "enable": True, - "allowReboot": False, - } - - -def test_put_auto_upgrade_with_empty_json(authorized_client, turned_off): - response = authorized_client.put("/system/configuration/autoUpgrade", json={}) - assert response.status_code == 200 - assert read_json(turned_off / "turned_off.json")["autoUpgrade"] == { - "enable": False, - "allowReboot": False, - } - - -def test_system_rebuild_unauthorized(client, mock_subprocess_popen): - response = client.get("/system/configuration/apply") - assert response.status_code == 401 - assert mock_subprocess_popen.call_count == 0 - - -def test_system_rebuild(authorized_client, mock_subprocess_popen): - response = authorized_client.get("/system/configuration/apply") - assert response.status_code == 200 - assert mock_subprocess_popen.call_count == 1 - 
assert mock_subprocess_popen.call_args[0][0] == [ - "systemctl", - "start", - "sp-nixos-rebuild.service", - ] - - -def test_system_upgrade_unauthorized(client, mock_subprocess_popen): - response = client.get("/system/configuration/upgrade") - assert response.status_code == 401 - assert mock_subprocess_popen.call_count == 0 - - -def test_system_upgrade(authorized_client, mock_subprocess_popen): - response = authorized_client.get("/system/configuration/upgrade") - assert response.status_code == 200 - assert mock_subprocess_popen.call_count == 1 - assert mock_subprocess_popen.call_args[0][0] == [ - "systemctl", - "start", - "sp-nixos-upgrade.service", - ] - - -def test_system_rollback_unauthorized(client, mock_subprocess_popen): - response = client.get("/system/configuration/rollback") - assert response.status_code == 401 - assert mock_subprocess_popen.call_count == 0 - - -def test_system_rollback(authorized_client, mock_subprocess_popen): - response = authorized_client.get("/system/configuration/rollback") - assert response.status_code == 200 - assert mock_subprocess_popen.call_count == 1 - assert mock_subprocess_popen.call_args[0][0] == [ - "systemctl", - "start", - "sp-nixos-rollback.service", - ] - - -def test_get_system_version_unauthorized(client, mock_subprocess_check_output): - response = client.get("/system/version") - assert response.status_code == 401 - assert mock_subprocess_check_output.call_count == 0 - - -def test_get_system_version(authorized_client, mock_subprocess_check_output): - response = authorized_client.get("/system/version") - assert response.status_code == 200 - assert response.json() == {"system_version": "Testing Linux"} - assert mock_subprocess_check_output.call_count == 1 - assert mock_subprocess_check_output.call_args[0][0] == ["uname", "-a"] - - -def test_reboot_system_unauthorized(client, mock_subprocess_popen): - response = client.get("/system/reboot") - assert response.status_code == 401 - assert mock_subprocess_popen.call_count == 0 - - -def test_reboot_system(authorized_client, mock_subprocess_popen): - response = authorized_client.get("/system/reboot") - assert response.status_code == 200 - assert mock_subprocess_popen.call_count == 1 - assert mock_subprocess_popen.call_args[0][0] == ["reboot"] - - -def test_get_python_version_unauthorized(client, mock_subprocess_check_output): - response = client.get("/system/pythonVersion") - assert response.status_code == 401 - assert mock_subprocess_check_output.call_count == 0 - - -def test_get_python_version(authorized_client, mock_subprocess_check_output): - response = authorized_client.get("/system/pythonVersion") - assert response.status_code == 200 - assert response.json() == "Testing Linux" - assert mock_subprocess_check_output.call_count == 1 - assert mock_subprocess_check_output.call_args[0][0] == ["python", "-V"] - - -def test_pull_system_unauthorized(client, mock_subprocess_popen): - response = client.get("/system/configuration/pull") - assert response.status_code == 401 - assert mock_subprocess_popen.call_count == 0 - - -def test_pull_system(authorized_client, mock_subprocess_popen, mock_os_chdir): - current_dir = os.getcwd() - response = authorized_client.get("/system/configuration/pull") - assert response.status_code == 200 - assert mock_subprocess_popen.call_count == 1 - assert mock_subprocess_popen.call_args[0][0] == ["git", "pull"] - assert mock_os_chdir.call_count == 2 - assert mock_os_chdir.call_args_list[0][0][0] == "/etc/nixos" - assert mock_os_chdir.call_args_list[1][0][0] == current_dir - - -def 
test_pull_system_broken_repo(authorized_client, mock_broken_service, mock_os_chdir): - current_dir = os.getcwd() - response = authorized_client.get("/system/configuration/pull") - assert response.status_code == 500 - assert mock_broken_service.call_count == 1 - assert mock_os_chdir.call_count == 2 - assert mock_os_chdir.call_args_list[0][0][0] == "/etc/nixos" - assert mock_os_chdir.call_args_list[1][0][0] == current_dir diff --git a/tests/test_rest_endpoints/test_system/domain b/tests/test_rest_endpoints/test_system/domain deleted file mode 100644 index 3679d0d..0000000 --- a/tests/test_rest_endpoints/test_system/domain +++ /dev/null @@ -1 +0,0 @@ -test-domain.tld \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/no_values.json b/tests/test_rest_endpoints/test_system/no_values.json deleted file mode 100644 index 5c1431e..0000000 --- a/tests/test_rest_endpoints/test_system/no_values.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": true - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/turned_off.json b/tests/test_rest_endpoints/test_system/turned_off.json deleted file mode 100644 index 2336f36..0000000 --- a/tests/test_rest_endpoints/test_system/turned_off.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": true - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": false, - "allowReboot": false - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/turned_on.json b/tests/test_rest_endpoints/test_system/turned_on.json deleted file mode 100644 index 42999d8..0000000 --- a/tests/test_rest_endpoints/test_system/turned_on.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - 
"enable": true - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_system/undefined.json b/tests/test_rest_endpoints/test_system/undefined.json deleted file mode 100644 index 6b9f3fd..0000000 --- a/tests/test_rest_endpoints/test_system/undefined.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": true - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users.py b/tests/test_rest_endpoints/test_users.py deleted file mode 100644 index ebb3eff..0000000 --- a/tests/test_rest_endpoints/test_users.py +++ /dev/null @@ -1,285 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -import json -import pytest - - -def read_json(file_path): - with open(file_path, "r", encoding="utf-8") as file: - return json.load(file) - - -invalid_usernames = [ - "root", - "messagebus", - "postfix", - "polkituser", - "dovecot2", - "dovenull", - "nginx", - "postgres", - "systemd-journal-gateway", - "prosody", - "systemd-network", - "systemd-resolve", - "systemd-timesync", - "opendkim", - "rspamd", - "sshd", - "selfprivacy-api", - "restic", - "redis", - "pleroma", - "ocserv", - "nextcloud", - "memcached", - "knot-resolver", - "gitea", - "bitwarden_rs", - "vaultwarden", - "acme", - "virtualMail", - "nixbld1", - "nixbld2", - "nixbld29", - "nobody", -] - - -## FIXTURES ################################################### - - -@pytest.fixture -def no_users(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_users.json") - assert read_json(datadir / "no_users.json")["users"] == [] - return datadir - - -@pytest.fixture -def one_user(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "one_user.json") - assert read_json(datadir / "one_user.json")["users"] == [ - { - "username": 
"user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": ["ssh-rsa KEY user1@pc"], - } - ] - return datadir - - -@pytest.fixture -def some_users(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "some_users.json") - assert read_json(datadir / "some_users.json")["users"] == [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": ["ssh-rsa KEY user1@pc"], - }, - {"username": "user2", "hashedPassword": "HASHED_PASSWORD_2", "sshKeys": []}, - {"username": "user3", "hashedPassword": "HASHED_PASSWORD_3"}, - ] - return datadir - - -@pytest.fixture -def undefined_settings(mocker, datadir): - mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json") - assert "users" not in read_json(datadir / "undefined.json") - return datadir - - -class ProcessMock: - """Mock subprocess.Popen""" - - def __init__(self, args, **kwargs): - self.args = args - self.kwargs = kwargs - - def communicate(): - return (b"NEW_HASHED", None) - - returncode = 0 - - -@pytest.fixture -def mock_subprocess_popen(mocker): - mock = mocker.patch("subprocess.Popen", autospec=True, return_value=ProcessMock) - return mock - - -## TESTS ###################################################### - - -def test_get_users_unauthorized(client, some_users, mock_subprocess_popen): - response = client.get("/users") - assert response.status_code == 401 - - -def test_get_some_users(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.get("/users") - assert response.status_code == 200 - assert response.json() == ["user1", "user2", "user3"] - - -def test_get_one_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.get("/users") - assert response.status_code == 200 - assert response.json() == ["user1"] - - -def test_get_one_user_with_main(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.get("/users?withMainUser=true") - assert response.status_code == 200 - assert response.json().sort() == ["tester", "user1"].sort() - - -def test_get_no_users(authorized_client, no_users, mock_subprocess_popen): - response = authorized_client.get("/users") - assert response.status_code == 200 - assert response.json() == [] - - -def test_get_no_users_with_main(authorized_client, no_users, mock_subprocess_popen): - response = authorized_client.get("/users?withMainUser=true") - assert response.status_code == 200 - assert response.json() == ["tester"] - - -def test_get_undefined_users( - authorized_client, undefined_settings, mock_subprocess_popen -): - response = authorized_client.get("/users") - assert response.status_code == 200 - assert response.json() == [] - - -def test_post_users_unauthorized(client, some_users, mock_subprocess_popen): - response = client.post("/users") - assert response.status_code == 401 - - -def test_post_one_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/users", json={"username": "user4", "password": "password"} - ) - assert response.status_code == 201 - assert read_json(one_user / "one_user.json")["users"] == [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": ["ssh-rsa KEY user1@pc"], - }, - { - "username": "user4", - "sshKeys": [], - "hashedPassword": "NEW_HASHED", - }, - ] - - -def test_post_without_username(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post("/users", json={"password": "password"}) - assert response.status_code == 
422 - - -def test_post_without_password(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post("/users", json={"username": "user4"}) - assert response.status_code == 422 - - -def test_post_without_username_and_password( - authorized_client, one_user, mock_subprocess_popen -): - response = authorized_client.post("/users", json={}) - assert response.status_code == 422 - - -@pytest.mark.parametrize("username", invalid_usernames) -def test_post_system_user(authorized_client, one_user, mock_subprocess_popen, username): - response = authorized_client.post( - "/users", json={"username": username, "password": "password"} - ) - assert response.status_code == 409 - - -def test_post_existing_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/users", json={"username": "user1", "password": "password"} - ) - assert response.status_code == 409 - - -def test_post_existing_main_user(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/users", json={"username": "tester", "password": "password"} - ) - assert response.status_code == 409 - - -def test_post_user_to_undefined_users( - authorized_client, undefined_settings, mock_subprocess_popen -): - response = authorized_client.post( - "/users", json={"username": "user4", "password": "password"} - ) - assert response.status_code == 201 - assert read_json(undefined_settings / "undefined.json")["users"] == [ - {"username": "user4", "sshKeys": [], "hashedPassword": "NEW_HASHED"} - ] - - -def test_post_very_long_username(authorized_client, one_user, mock_subprocess_popen): - response = authorized_client.post( - "/users", json={"username": "a" * 32, "password": "password"} - ) - assert response.status_code == 400 - - -@pytest.mark.parametrize("username", ["", "1", "фыр", "user1@", "№:%##$^&@$&^()_"]) -def test_post_invalid_username( - authorized_client, one_user, mock_subprocess_popen, username -): - response = authorized_client.post( - "/users", json={"username": username, "password": "password"} - ) - assert response.status_code == 400 - - -def test_delete_user_unauthorized(client, some_users, mock_subprocess_popen): - response = client.delete("/users/user1") - assert response.status_code == 401 - - -def test_delete_user_not_found(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.delete("/users/user4") - assert response.status_code == 404 - - -def test_delete_user(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.delete("/users/user1") - assert response.status_code == 200 - assert read_json(some_users / "some_users.json")["users"] == [ - {"username": "user2", "hashedPassword": "HASHED_PASSWORD_2", "sshKeys": []}, - {"username": "user3", "hashedPassword": "HASHED_PASSWORD_3"}, - ] - - -@pytest.mark.parametrize("username", invalid_usernames) -def test_delete_system_user( - authorized_client, some_users, mock_subprocess_popen, username -): - response = authorized_client.delete("/users/" + username) - assert response.status_code == 400 or response.status_code == 404 - - -def test_delete_main_user(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.delete("/users/tester") - assert response.status_code == 400 - - -def test_delete_just_delete(authorized_client, some_users, mock_subprocess_popen): - response = authorized_client.delete("/users") - assert response.status_code == 405 diff --git a/tests/test_rest_endpoints/test_users/no_users.json 
b/tests/test_rest_endpoints/test_users/no_users.json deleted file mode 100644 index 5929a79..0000000 --- a/tests/test_rest_endpoints/test_users/no_users.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/one_user.json b/tests/test_rest_endpoints/test_users/one_user.json deleted file mode 100644 index 6c553bc..0000000 --- a/tests/test_rest_endpoints/test_users/one_user.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/some_users.json b/tests/test_rest_endpoints/test_users/some_users.json deleted file mode 100644 index df6380a..0000000 --- a/tests/test_rest_endpoints/test_users/some_users.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, 
- "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "users": [ - { - "username": "user1", - "hashedPassword": "HASHED_PASSWORD_1", - "sshKeys": [ - "ssh-rsa KEY user1@pc" - ] - }, - { - "username": "user2", - "hashedPassword": "HASHED_PASSWORD_2", - "sshKeys": [ - ] - }, - { - "username": "user3", - "hashedPassword": "HASHED_PASSWORD_3" - } - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_rest_endpoints/test_users/undefined.json b/tests/test_rest_endpoints/test_users/undefined.json deleted file mode 100644 index c1691ea..0000000 --- a/tests/test_rest_endpoints/test_users/undefined.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "api": { - "token": "TEST_TOKEN", - "enableSwagger": false - }, - "bitwarden": { - "enable": false - }, - "databasePassword": "PASSWORD", - "domain": "test.tld", - "hashedMasterPassword": "HASHED_PASSWORD", - "hostname": "test-instance", - "nextcloud": { - "adminPassword": "ADMIN", - "databasePassword": "ADMIN", - "enable": true - }, - "resticPassword": "PASS", - "ssh": { - "enable": true, - "passwordAuthentication": true, - "rootKeys": [ - "ssh-ed25519 KEY test@pc" - ] - }, - "username": "tester", - "gitea": { - "enable": false - }, - "ocserv": { - "enable": true - }, - "pleroma": { - "enable": true - }, - "autoUpgrade": { - "enable": true, - "allowReboot": true - }, - "timezone": "Europe/Moscow", - "sshKeys": [ - "ssh-rsa KEY test@pc" - ], - "dns": { - "provider": "CLOUDFLARE", - "apiKey": "TOKEN" - }, - "server": { - "provider": "HETZNER" - }, - "backup": { - "provider": "BACKBLAZE", - "accountId": "ID", - "accountKey": "KEY", - "bucket": "selfprivacy" - } -} \ No newline at end of file diff --git a/tests/test_services.py b/tests/test_services.py index b83a7f2..c5eff66 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -1,18 +1,25 @@ """ Tests for generic service methods """ +import pytest from pytest import raises +from selfprivacy_api.utils import ReadUserData, WriteUserData +from selfprivacy_api.utils.waitloop import wait_until_true + +import selfprivacy_api.services as services_module + from selfprivacy_api.services.bitwarden import Bitwarden from selfprivacy_api.services.pleroma import Pleroma +from selfprivacy_api.services.mailserver import MailServer from selfprivacy_api.services.owned_path import OwnedPath from selfprivacy_api.services.generic_service_mover import FolderMoveNames from selfprivacy_api.services.test_service import DummyService from selfprivacy_api.services.service import Service, ServiceStatus, StoppedService -from selfprivacy_api.utils.waitloop import wait_until_true +from selfprivacy_api.services import get_enabled_services -from tests.test_graphql.test_backup import raw_dummy_service +from tests.test_dkim import dkim_file, no_dkim_file def test_unimplemented_folders_raises(): @@ -87,3 +94,88 @@ def test_foldermoves_from_ownedpaths(): group="vaultwarden", owner="vaultwarden", ) + + +def test_enabling_disabling_reads_json(dummy_service: DummyService): + with WriteUserData() as data: + data["modules"][dummy_service.get_id()]["enable"] = False + assert dummy_service.is_enabled() is False + with WriteUserData() as data: + data["modules"][dummy_service.get_id()]["enable"] = True + assert dummy_service.is_enabled() is True + + +# A helper to test undefined states. 
Used in fixtures below
+def undefine_service_enabled_status(param, dummy_service):
+    if param == "deleted_attribute":
+        with WriteUserData() as data:
+            del data["modules"][dummy_service.get_id()]["enable"]
+    if param == "service_not_in_json":
+        with WriteUserData() as data:
+            del data["modules"][dummy_service.get_id()]
+    if param == "modules_not_in_json":
+        with WriteUserData() as data:
+            del data["modules"]
+
+
+# May be defined or not
+@pytest.fixture(
+    params=[
+        "normally_enabled",
+        "deleted_attribute",
+        "service_not_in_json",
+        "modules_not_in_json",
+    ]
+)
+def possibly_dubiously_enabled_service(
+    dummy_service: DummyService, request
+) -> DummyService:
+    if request.param != "normally_enabled":
+        undefine_service_enabled_status(request.param, dummy_service)
+    return dummy_service
+
+
+# Strictly UNdefined
+@pytest.fixture(
+    params=["deleted_attribute", "service_not_in_json", "modules_not_in_json"]
+)
+def undefined_enabledness_service(dummy_service: DummyService, request) -> DummyService:
+    undefine_service_enabled_status(request.param, dummy_service)
+    return dummy_service
+
+
+def test_undefined_enabledness_in_json_means_False(
+    undefined_enabledness_service: DummyService,
+):
+    dummy_service = undefined_enabledness_service
+    assert dummy_service.is_enabled() is False
+
+
+def test_enabling_disabling_writes_json(
+    possibly_dubiously_enabled_service: DummyService,
+):
+    dummy_service = possibly_dubiously_enabled_service
+
+    dummy_service.disable()
+    with ReadUserData() as data:
+        assert data["modules"][dummy_service.get_id()]["enable"] is False
+    dummy_service.enable()
+    with ReadUserData() as data:
+        assert data["modules"][dummy_service.get_id()]["enable"] is True
+    dummy_service.disable()
+    with ReadUserData() as data:
+        assert data["modules"][dummy_service.get_id()]["enable"] is False
+
+
+# more detailed testing of this is in test_graphql/test_system.py
+def test_mailserver_with_dkim_returns_some_dns(dkim_file):
+    records = MailServer().get_dns_records()
+    assert len(records) > 0
+
+
+def test_mailserver_with_no_dkim_returns_no_dns(no_dkim_file):
+    assert MailServer().get_dns_records() == []
+
+
+def test_services_enabled_by_default(generic_userdata):
+    assert set(get_enabled_services()) == set(services_module.services)
diff --git a/tests/test_services_systemctl.py b/tests/test_services_systemctl.py
new file mode 100644
index 0000000..8b247e0
--- /dev/null
+++ b/tests/test_services_systemctl.py
@@ -0,0 +1,94 @@
+import pytest
+
+from selfprivacy_api.services.service import ServiceStatus
+from selfprivacy_api.services.bitwarden import Bitwarden
+from selfprivacy_api.services.gitea import Gitea
+from selfprivacy_api.services.mailserver import MailServer
+from selfprivacy_api.services.nextcloud import Nextcloud
+from selfprivacy_api.services.ocserv import Ocserv
+from selfprivacy_api.services.pleroma import Pleroma
+
+
+def expected_status_call(service_name: str):
+    return ["systemctl", "show", service_name]
+
+
+def call_args_asserts(mocked_object):
+    assert mocked_object.call_count == 7
+    calls = [callargs[0][0] for callargs in mocked_object.call_args_list]
+    assert calls == [
+        expected_status_call(service)
+        for service in [
+            "dovecot2.service",
+            "postfix.service",
+            "vaultwarden.service",
+            "gitea.service",
+            "phpfpm-nextcloud.service",
+            "ocserv.service",
+            "pleroma.service",
+        ]
+    ]
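+
+# The canned `systemctl show` outputs below are returned verbatim by the
+# mocked subprocess.check_output; the status mapping presumably keys off
+# ActiveState (active/failed), with the remaining fields included only for
+# realism.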
+""" + +FAILED_STATUS = b""" +Type=oneshot +ExitType=main +Restart=no +NotifyAccess=none +RestartUSec=100ms +LoadState=loaded +ActiveState=failed +FreezerState=running +SubState=exited +""" + + +@pytest.fixture +def mock_popen_systemctl_service_ok(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=SUCCESSFUL_STATUS + ) + return mock + + +@pytest.fixture +def mock_popen_systemctl_service_not_ok(mocker): + mock = mocker.patch( + "subprocess.check_output", autospec=True, return_value=FAILED_STATUS + ) + return mock + + +############################################################################### + + +def test_systemctl_ok(mock_popen_systemctl_service_ok): + assert MailServer.get_status() == ServiceStatus.ACTIVE + assert Bitwarden.get_status() == ServiceStatus.ACTIVE + assert Gitea.get_status() == ServiceStatus.ACTIVE + assert Nextcloud.get_status() == ServiceStatus.ACTIVE + assert Ocserv.get_status() == ServiceStatus.ACTIVE + assert Pleroma.get_status() == ServiceStatus.ACTIVE + call_args_asserts(mock_popen_systemctl_service_ok) + + +def test_systemctl_failed_service(mock_popen_systemctl_service_not_ok): + assert MailServer.get_status() == ServiceStatus.FAILED + assert Bitwarden.get_status() == ServiceStatus.FAILED + assert Gitea.get_status() == ServiceStatus.FAILED + assert Nextcloud.get_status() == ServiceStatus.FAILED + assert Ocserv.get_status() == ServiceStatus.FAILED + assert Pleroma.get_status() == ServiceStatus.FAILED + call_args_asserts(mock_popen_systemctl_service_not_ok) diff --git a/tests/test_ssh.py b/tests/test_ssh.py new file mode 100644 index 0000000..2d0f70d --- /dev/null +++ b/tests/test_ssh.py @@ -0,0 +1,430 @@ +""" +Action-level tests of ssh +(For API-independent logic incl. connection to persistent storage) +""" + +import pytest +from typing import Optional + +from selfprivacy_api.actions.ssh import ( + set_ssh_settings, + get_ssh_settings, + create_ssh_key, + remove_ssh_key, + KeyNotFound, + UserNotFound, +) +from selfprivacy_api.actions.users import ( + get_users, + get_user_by_username, + UserDataUserOrigin, +) +from selfprivacy_api.utils import WriteUserData, ReadUserData + + +@pytest.fixture(params=[True, False]) +def bool_value(request): + return request.param + + +@pytest.fixture( + params=[ + "normal_populated_json", + "deleted_enabled", + "deleted_auth", + "empty", + "ssh_not_in_json", + ] +) +def possibly_undefined_ssh_settings(generic_userdata, request, bool_value): + with WriteUserData() as data: + data["ssh"] = {"enable": bool_value, "passswordAuthentication": bool_value} + assert get_raw_json_ssh_setting("enable") == bool_value + assert get_raw_json_ssh_setting("passswordAuthentication") == bool_value + + if request.param == "deleted_enabled": + with WriteUserData() as data: + del data["ssh"]["enable"] + + if request.param == "deleted_auth": + with WriteUserData() as data: + del data["ssh"]["passswordAuthentication"] + + if request.param == "empty": + with WriteUserData() as data: + del data["ssh"]["passswordAuthentication"] + del data["ssh"]["enable"] + + if request.param == "ssh_not_in_json": + with WriteUserData() as data: + del data["ssh"] + + +@pytest.fixture(params=[True, False, None]) +def ssh_enable_spectrum(request): + return request.param + + +@pytest.fixture(params=[True, False, None]) +def password_auth_spectrum(request): + return request.param + + +def admin_name() -> Optional[str]: + users = get_users() + for user in users: + if user.origin == UserDataUserOrigin.PRIMARY: + return user.username + return 
diff --git a/tests/test_ssh.py b/tests/test_ssh.py
new file mode 100644
index 0000000..2d0f70d
--- /dev/null
+++ b/tests/test_ssh.py
@@ -0,0 +1,430 @@
+"""
+Action-level tests of ssh
+(For API-independent logic incl. connection to persistent storage)
+"""
+
+import pytest
+from typing import Optional
+
+from selfprivacy_api.actions.ssh import (
+    set_ssh_settings,
+    get_ssh_settings,
+    create_ssh_key,
+    remove_ssh_key,
+    KeyNotFound,
+    UserNotFound,
+)
+from selfprivacy_api.actions.users import (
+    get_users,
+    get_user_by_username,
+    UserDataUserOrigin,
+)
+from selfprivacy_api.utils import WriteUserData, ReadUserData
+
+
+@pytest.fixture(params=[True, False])
+def bool_value(request):
+    return request.param
+
+
+@pytest.fixture(
+    params=[
+        "normal_populated_json",
+        "deleted_enabled",
+        "deleted_auth",
+        "empty",
+        "ssh_not_in_json",
+    ]
+)
+def possibly_undefined_ssh_settings(generic_userdata, request, bool_value):
+    with WriteUserData() as data:
+        data["ssh"] = {"enable": bool_value, "passwordAuthentication": bool_value}
+    assert get_raw_json_ssh_setting("enable") == bool_value
+    assert get_raw_json_ssh_setting("passwordAuthentication") == bool_value
+
+    if request.param == "deleted_enabled":
+        with WriteUserData() as data:
+            del data["ssh"]["enable"]
+
+    if request.param == "deleted_auth":
+        with WriteUserData() as data:
+            del data["ssh"]["passwordAuthentication"]
+
+    if request.param == "empty":
+        with WriteUserData() as data:
+            del data["ssh"]["passwordAuthentication"]
+            del data["ssh"]["enable"]
+
+    if request.param == "ssh_not_in_json":
+        with WriteUserData() as data:
+            del data["ssh"]
+
+
+@pytest.fixture(params=[True, False, None])
+def ssh_enable_spectrum(request):
+    return request.param
+
+
+@pytest.fixture(params=[True, False, None])
+def password_auth_spectrum(request):
+    return request.param
+
+
+def admin_name() -> Optional[str]:
+    users = get_users()
+    for user in users:
+        if user.origin == UserDataUserOrigin.PRIMARY:
+            return user.username
+    return None
+
+
+def get_raw_json_ssh_setting(setting: str):
+    with ReadUserData() as data:
+        return (data.get("ssh") or {}).get(setting)
+
+
+def test_read_json(possibly_undefined_ssh_settings):
+    with ReadUserData() as data:
+        if "ssh" not in data.keys():
+            assert get_ssh_settings().enable is not None
+            assert get_ssh_settings().passwordAuthentication is not None
+
+            # TODO: Is it really a good idea to have password ssh enabled by default?
+            assert get_ssh_settings().enable is True
+            assert get_ssh_settings().passwordAuthentication is True
+            return
+
+        if "enable" not in data["ssh"].keys():
+            assert get_ssh_settings().enable is True
+        else:
+            assert get_ssh_settings().enable == data["ssh"]["enable"]
+
+        if "passwordAuthentication" not in data["ssh"].keys():
+            assert get_ssh_settings().passwordAuthentication is False
+        else:
+            assert (
+                get_ssh_settings().passwordAuthentication
+                == data["ssh"]["passwordAuthentication"]
+            )
+
+
+def test_enabling_disabling_writes_json(
+    possibly_undefined_ssh_settings, ssh_enable_spectrum, password_auth_spectrum
+):
+    original_enable = get_raw_json_ssh_setting("enable")
+    original_password_auth = get_raw_json_ssh_setting("passwordAuthentication")
+
+    set_ssh_settings(ssh_enable_spectrum, password_auth_spectrum)
+
+    with ReadUserData() as data:
+        if ssh_enable_spectrum is None:
+            assert get_raw_json_ssh_setting("enable") == original_enable
+        else:
+            assert get_raw_json_ssh_setting("enable") == ssh_enable_spectrum
+
+        if password_auth_spectrum is None:
+            assert (
+                get_raw_json_ssh_setting("passwordAuthentication")
+                == original_password_auth
+            )
+        else:
+            assert (
+                get_raw_json_ssh_setting("passwordAuthentication")
+                == password_auth_spectrum
+            )
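+
+# Taken together, the two tests above pin down the defaulting rules:
+# a completely missing "ssh" section reads back as enable=True and
+# passwordAuthentication=True, a present section defaults a missing "enable"
+# to True and a missing "passwordAuthentication" to False, and passing None
+# to set_ssh_settings leaves the corresponding raw JSON value untouched.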
+
+
+############### ROOTKEYS
+
+
+def test_read_root_keys_from_json(generic_userdata):
+    assert get_ssh_settings().rootKeys == ["ssh-ed25519 KEY test@pc"]
+    new_keys = ["ssh-ed25519 KEY test@pc", "ssh-ed25519 KEY2 test@pc"]
+
+    with WriteUserData() as data:
+        data["ssh"]["rootKeys"] = new_keys
+
+    assert get_ssh_settings().rootKeys == new_keys
+
+    with WriteUserData() as data:
+        del data["ssh"]["rootKeys"]
+
+    assert get_ssh_settings().rootKeys == []
+
+    with WriteUserData() as data:
+        del data["ssh"]
+
+    assert get_ssh_settings().rootKeys == []
+
+
+def test_removing_root_key_writes_json(generic_userdata):
+    # generic userdata has a single root key
+    rootkeys = get_ssh_settings().rootKeys
+    assert len(rootkeys) == 1
+    key1 = rootkeys[0]
+    key2 = "ssh-rsa MYSUPERKEY root@pc"
+
+    create_ssh_key("root", key2)
+    rootkeys = get_ssh_settings().rootKeys
+    assert len(rootkeys) == 2
+
+    remove_ssh_key("root", key2)
+    with ReadUserData() as data:
+        assert "ssh" in data
+        assert "rootKeys" in data["ssh"]
+        assert data["ssh"]["rootKeys"] == [key1]
+
+    remove_ssh_key("root", key1)
+    with ReadUserData() as data:
+        assert "ssh" in data
+        assert "rootKeys" in data["ssh"]
+        assert data["ssh"]["rootKeys"] == []
+
+
+def test_remove_root_key_on_undefined(generic_userdata):
+    # generic userdata has a single root key
+    rootkeys = get_ssh_settings().rootKeys
+    assert len(rootkeys) == 1
+    key1 = rootkeys[0]
+
+    with WriteUserData() as data:
+        del data["ssh"]["rootKeys"]
+
+    with pytest.raises(KeyNotFound):
+        remove_ssh_key("root", key1)
+    rootkeys = get_ssh_settings().rootKeys
+    assert len(rootkeys) == 0
+
+    with WriteUserData() as data:
+        del data["ssh"]
+
+    with pytest.raises(KeyNotFound):
+        remove_ssh_key("root", key1)
+    rootkeys = get_ssh_settings().rootKeys
+    assert len(rootkeys) == 0
+
+
+def test_adding_root_key_writes_json(generic_userdata):
+    with WriteUserData() as data:
+        del data["ssh"]
+    key1 = "ssh-ed25519 KEY test@pc"
+    key2 = "ssh-ed25519 KEY2 test@pc"
+    create_ssh_key("root", key1)
+
+    with ReadUserData() as data:
+        assert "ssh" in data
+        assert "rootKeys" in data["ssh"]
+        assert data["ssh"]["rootKeys"] == [key1]
+
+    with WriteUserData() as data:
+        del data["ssh"]["rootKeys"]
+    create_ssh_key("root", key1)
+
+    with ReadUserData() as data:
+        assert "ssh" in data
+        assert "rootKeys" in data["ssh"]
+        assert data["ssh"]["rootKeys"] == [key1]
+
+    create_ssh_key("root", key2)
+
+    with ReadUserData() as data:
+        assert "ssh" in data
+        assert "rootKeys" in data["ssh"]
+        # order is irrelevant
+        assert set(data["ssh"]["rootKeys"]) == set([key1, key2])
+
+
+############### ADMIN KEYS
+
+
+def test_read_admin_keys_from_json(generic_userdata):
+    admin_name = "tester"
+    assert get_user_by_username(admin_name).ssh_keys == ["ssh-rsa KEY test@pc"]
+    new_keys = ["ssh-rsa KEY test@pc", "ssh-ed25519 KEY2 test@pc"]
+
+    with WriteUserData() as data:
+        data["sshKeys"] = new_keys
+
+    assert get_user_by_username(admin_name).ssh_keys == new_keys
+
+    with WriteUserData() as data:
+        del data["sshKeys"]
+
+    assert get_user_by_username(admin_name).ssh_keys == []
+
+
+def test_adding_admin_key_writes_json(generic_userdata):
+    admin_name = "tester"
+
+    with WriteUserData() as data:
+        del data["sshKeys"]
+    key1 = "ssh-ed25519 KEY test@pc"
+    key2 = "ssh-ed25519 KEY2 test@pc"
+    create_ssh_key(admin_name, key1)
+
+    with ReadUserData() as data:
+        assert "sshKeys" in data
+        assert data["sshKeys"] == [key1]
+
+    create_ssh_key(admin_name, key2)
+
+    with ReadUserData() as data:
+        assert "sshKeys" in data
+        # order is irrelevant
+        assert set(data["sshKeys"]) == set([key1, key2])
+
+
+def test_removing_admin_key_writes_json(generic_userdata):
+    # generic userdata has a single admin key
+    admin_name = "tester"
+
+    admin_keys = get_user_by_username(admin_name).ssh_keys
+    assert len(admin_keys) == 1
+    key1 = admin_keys[0]
+    key2 = "ssh-rsa MYSUPERKEY admin@pc"
+
+    create_ssh_key(admin_name, key2)
+    admin_keys = get_user_by_username(admin_name).ssh_keys
+    assert len(admin_keys) == 2
+
+    remove_ssh_key(admin_name, key2)
+
+    with ReadUserData() as data:
+        assert "sshKeys" in data
+        assert data["sshKeys"] == [key1]
+
+    remove_ssh_key(admin_name, key1)
+    with ReadUserData() as data:
+        assert "sshKeys" in data
+        assert data["sshKeys"] == []
+
+
+def test_remove_admin_key_on_undefined(generic_userdata):
+    # generic userdata has a single admin key
+    admin_name = "tester"
+
+    admin_keys = get_user_by_username(admin_name).ssh_keys
+    assert len(admin_keys) == 1
+    key1 = admin_keys[0]
+
+    with WriteUserData() as data:
+        del data["sshKeys"]
+
+    with pytest.raises(KeyNotFound):
+        remove_ssh_key(admin_name, key1)
+    admin_keys = get_user_by_username(admin_name).ssh_keys
+    assert len(admin_keys) == 0
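+
+# Note the asymmetry these tests encode: root keys live under
+# data["ssh"]["rootKeys"], the admin's keys live in the top-level
+# data["sshKeys"] list, and each regular user's keys (below) live inside
+# that user's own entry in data["users"].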
+
+
+############### USER KEYS
+
+regular_users = ["user1", "user2", "user3"]
+
+
+def find_user_index_in_json_users(users: list, username: str) -> Optional[int]:
+    for i, user in enumerate(users):
+        if user["username"] == username:
+            return i
+    return None
+
+
+@pytest.mark.parametrize("username", regular_users)
+def test_read_user_keys_from_json(generic_userdata, username):
+    old_keys = [f"ssh-rsa KEY {username}@pc"]
+    assert get_user_by_username(username).ssh_keys == old_keys
+    new_keys = ["ssh-rsa KEY test@pc", "ssh-ed25519 KEY2 test@pc"]
+
+    with WriteUserData() as data:
+        user_index = find_user_index_in_json_users(data["users"], username)
+        data["users"][user_index]["sshKeys"] = new_keys
+
+    assert get_user_by_username(username).ssh_keys == new_keys
+
+    with WriteUserData() as data:
+        user_index = find_user_index_in_json_users(data["users"], username)
+        del data["users"][user_index]["sshKeys"]
+
+    assert get_user_by_username(username).ssh_keys == []
+
+    # deeper deletions are for user getter tests, not here
+
+
+@pytest.mark.parametrize("username", regular_users)
+def test_adding_user_key_writes_json(generic_userdata, username):
+    with WriteUserData() as data:
+        user_index = find_user_index_in_json_users(data["users"], username)
+        del data["users"][user_index]["sshKeys"]
+    key1 = "ssh-ed25519 KEY test@pc"
+    key2 = "ssh-ed25519 KEY2 test@pc"
+    create_ssh_key(username, key1)
+
+    with ReadUserData() as data:
+        user_index = find_user_index_in_json_users(data["users"], username)
+        assert "sshKeys" in data["users"][user_index]
+        assert data["users"][user_index]["sshKeys"] == [key1]
+
+    create_ssh_key(username, key2)
+
+    with ReadUserData() as data:
+        user_index = find_user_index_in_json_users(data["users"], username)
+        assert "sshKeys" in data["users"][user_index]
+        # order is irrelevant
+        assert set(data["users"][user_index]["sshKeys"]) == set([key1, key2])
+
+
+@pytest.mark.parametrize("username", regular_users)
+def test_removing_user_key_writes_json(generic_userdata, username):
+    # generic userdata has a single user key
+
+    user_keys = get_user_by_username(username).ssh_keys
+    assert len(user_keys) == 1
+    key1 = user_keys[0]
+    key2 = "ssh-rsa MYSUPERKEY admin@pc"
+
+    create_ssh_key(username, key2)
+    user_keys = get_user_by_username(username).ssh_keys
+    assert len(user_keys) == 2
+
+    remove_ssh_key(username, key2)
+
+    with ReadUserData() as data:
+        user_index = find_user_index_in_json_users(data["users"], username)
+        assert "sshKeys" in data["users"][user_index]
+        assert data["users"][user_index]["sshKeys"] == [key1]
+
+    remove_ssh_key(username, key1)
+    with ReadUserData() as data:
+        user_index = find_user_index_in_json_users(data["users"], username)
+        assert "sshKeys" in data["users"][user_index]
+        assert data["users"][user_index]["sshKeys"] == []
+
+
+@pytest.mark.parametrize("username", regular_users)
+def test_remove_user_key_on_undefined(generic_userdata, username):
+    # generic userdata has a single user key
+    user_keys = get_user_by_username(username).ssh_keys
+    assert len(user_keys) == 1
+    key1 = user_keys[0]
+
+    with WriteUserData() as data:
+        user_index = find_user_index_in_json_users(data["users"], username)
+        del data["users"][user_index]["sshKeys"]
+
+    with pytest.raises(KeyNotFound):
+        remove_ssh_key(username, key1)
+
+    user_keys = get_user_by_username(username).ssh_keys
+    assert len(user_keys) == 0
+
+    with WriteUserData() as data:
+        user_index = find_user_index_in_json_users(data["users"], username)
+        del data["users"][user_index]
+
+    with pytest.raises(UserNotFound):
+        remove_ssh_key(username, key1)
+
+    with WriteUserData() as data:
+        del data["users"]
+
+    with pytest.raises(UserNotFound):
+        remove_ssh_key(username, key1)
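The new tests/test_system.py below drives run_blocking and ShellException directly. Their implementation is not part of this diff; the sketch that follows is a plausible shape consistent with the new_session keyword and the "extra operand" assertion in the tests (all of it an assumption, not the project's actual code):

# Hypothetical sketch of run_blocking, not the actual selfprivacy_api code.
import subprocess


class ShellException(Exception):
    """Raised when a blocking shell command exits nonzero."""


def run_blocking(cmd: list, new_session: bool = False) -> str:
    # Capture stdout and stderr together so error text (e.g. "extra operand")
    # ends up in the exception message, as the tests expect.
    result = subprocess.run(
        cmd,
        capture_output=True,
        text=True,
        start_new_session=new_session,  # detach from the controlling session
    )
    if result.returncode != 0:
        raise ShellException(result.stdout + result.stderr)
    return result.stdout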
diff --git a/tests/test_system.py b/tests/test_system.py
new file mode 100644
index 0000000..549692e
--- /dev/null
+++ b/tests/test_system.py
@@ -0,0 +1,22 @@
+import pytest
+from selfprivacy_api.actions.system import run_blocking, ShellException
+
+# uname is just an arbitrary command expected to be everywhere we care about
+
+
+def test_uname():
+    output = run_blocking(["uname"])
+    assert output is not None
+
+
+def test_uname_new_session():
+    output = run_blocking(["uname"], new_session=True)
+    assert output is not None
+
+
+def test_uname_nonexistent_args():
+    with pytest.raises(ShellException) as exception_info:
+        # uname: extra operand ‘isdyfhishfaisljhkeysmash’
+        # Try 'uname --help' for more information.
+        run_blocking(["uname", "isdyfhishfaisljhkeysmash"], new_session=True)
+    assert "extra operand" in exception_info.value.args[0]
diff --git a/tests/test_users.py b/tests/test_users.py
new file mode 100644
index 0000000..3d7f38f
--- /dev/null
+++ b/tests/test_users.py
@@ -0,0 +1,27 @@
+"""
+A place for user storage tests and other user tests that are not GraphQL-specific.
+"""
+from selfprivacy_api.utils import ReadUserData, WriteUserData
+from selfprivacy_api.actions.users import delete_user
+
+# Yes, it is an incomplete suite.
+# It was born in order to not lose the things that the REST API tests checked for.
+# In the future, user storage tests that do not depend on an actual API
+# (GraphQL or otherwise) go here.
+
+
+def test_delete_user_writes_json(generic_userdata):
+    delete_user("user2")
+    with ReadUserData() as data:
+        assert data["users"] == [
+            {
+                "username": "user1",
+                "hashedPassword": "HASHED_PASSWORD_1",
+                "sshKeys": ["ssh-rsa KEY user1@pc"],
+            },
+            {
+                "username": "user3",
+                "hashedPassword": "HASHED_PASSWORD_3",
+                "sshKeys": ["ssh-rsa KEY user3@pc"],
+            },
+        ]
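These storage tests all share one verification idiom: mutate through an action or a WriteUserData block, then reread from disk and compare. A minimal standalone illustration of that idiom (set_and_verify_timezone is a hypothetical helper, not part of the patch; "timezone" is just a field the fixture JSON is known to contain):

# Illustrative helper only, not part of the patch.
from selfprivacy_api.utils import ReadUserData, WriteUserData


def set_and_verify_timezone(tz: str) -> bool:
    with WriteUserData() as data:  # lock, mutate, flush on exit
        data["timezone"] = tz
    with ReadUserData() as data:  # independent read-back from disk
        return data.get("timezone") == tz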