Merge branch 'master' into def/nix-collect-garbage-endpoint

dettlaff 2023-04-05 13:52:48 +03:00
commit f781b51a6f
105 changed files with 2368 additions and 1511 deletions

@@ -5,12 +5,16 @@ name: default
 steps:
   - name: Run Tests and Generate Coverage Report
     commands:
+      - kill $(ps aux | grep '[r]edis-server 127.0.0.1:6389' | awk '{print $2}')
+      - redis-server --bind 127.0.0.1 --port 6389 >/dev/null &
       - coverage run -m pytest -q
      - coverage xml
       - sonar-scanner -Dsonar.projectKey=SelfPrivacy-REST-API -Dsonar.sources=. -Dsonar.host.url=http://analyzer.lan:9000 -Dsonar.login="$SONARQUBE_TOKEN"
     environment:
       SONARQUBE_TOKEN:
         from_secret: SONARQUBE_TOKEN
+      USE_REDIS_PORT: 6389

   - name: Run Bandit Checks
     commands:
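The pipeline now starts a dedicated Redis instance on port 6389 and exposes it to the tests via `USE_REDIS_PORT`. A minimal sketch of how test code could pick this variable up; the helper below is illustrative, not part of this commit:

```python
# Hypothetical helper: connect to the Redis instance the CI pipeline started.
# Only the USE_REDIS_PORT variable name comes from the config above.
import os

import redis


def connect_to_test_redis() -> redis.Redis:
    """Connect to the test Redis on the port exported by the CI environment."""
    port = int(os.environ.get("USE_REDIS_PORT", "6379"))
    return redis.Redis(host="127.0.0.1", port=port, decode_responses=True)
```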

8  .idea/.gitignore vendored Normal file

@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>

4  .idea/misc.xml Normal file

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9" project-jdk-type="Python SDK" />
</project>

8  .idea/modules.xml Normal file

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/selfprivacy-rest-api.iml" filepath="$PROJECT_DIR$/.idea/selfprivacy-rest-api.iml" />
    </modules>
  </component>
</project>

.idea/selfprivacy-rest-api.iml Normal file

@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="jdk" jdkName="Python 3.9" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
  <component name="PyDocumentationSettings">
    <option name="format" value="PLAIN" />
    <option name="myDocStringFormat" value="Plain" />
  </component>
  <component name="TestRunnerService">
    <option name="PROJECT_TEST_RUNNER" value="py.test" />
  </component>
</module>

12  .idea/vcs.xml Normal file

@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="CommitMessageInspectionProfile">
    <profile version="1.0">
      <inspection_tool class="CommitFormat" enabled="true" level="WARNING" enabled_by_default="true" />
      <inspection_tool class="CommitNamingConvention" enabled="true" level="WARNING" enabled_by_default="true" />
    </profile>
  </component>
  <component name="VcsDirectoryMappings">
    <mapping directory="" vcs="Git" />
  </component>
</project>

88  CONTRIBUTING.md Normal file

@@ -0,0 +1,88 @@
# SelfPrivacy API contributors guide

Instructions for [VScode](https://code.visualstudio.com) or [VScodium](https://github.com/VSCodium/vscodium) on a Unix-like platform.

1. **To get started, create an account for yourself on the** [**SelfPrivacy Gitea**](https://git.selfprivacy.org/user/sign_up). Fork the [repository](https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api) and clone it to your local computer:

   ```git clone https://git.selfprivacy.org/your_user_name/selfprivacy-rest-api```

2. **Install Nix:**

   ```sh <(curl -L https://nixos.org/nix/install)```

   For detailed installation information, see the [Nix manual](https://nixos.org/manual/nix/stable/installation/installing-binary.html#installing-a-binary-distribution).

3. **Change directory to the cloned repository and start a nix shell:**

   ```cd selfprivacy-rest-api && nix-shell```

   Nix will install all of the packages necessary for development; all further actions take place inside this nix-shell.

4. **Install these plugins for VScode/VScodium.**

   Required: ```ms-python.python```, ```ms-python.vscode-pylance```

   Optional, but highly recommended: ```ms-python.black-formatter```, ```bbenoist.Nix```, ```ryanluker.vscode-coverage-gutters```

5. **Set the path to the Python interpreter from the Nix store.** To do this, execute:

   ```whereis python```

   Copy the path that starts with ```/nix/store/``` and ends with ```env/bin/python```, e.g.:

   ```/nix/store/???-python3-3.9.??-env/bin/python```

   Click on the Python version selector in the lower right corner and replace the project's interpreter path with the one you copied from the terminal.

6. **Congratulations :) Now you can develop new changes and test the project locally in a Nix environment.**

## What do you need to know before starting development work?

- The REST API is deprecated; the project has moved to [GraphQL](https://graphql.org). However, the existing functionality still works over REST.

## What to do after making changes to the repository?

**Run the unit tests** with ```pytest .```
Make sure that all tests pass and the API works correctly. For convenience, you can use the built-in VScode test interface.

To check the code coverage percentage, execute:

```coverage run -m pytest && coverage xml && coverage report```

Then, using the recommended extension ```ryanluker.vscode-coverage-gutters```, navigate to one of the test files and click the "Watch" button on the bottom panel of VScode.

**Format the code.** We use [black](https://pypi.org/project/black/); run ```black .``` to format files automatically, or use the recommended extension.

**And please remember, we have adopted the** [**conventional commits**](https://www.conventionalcommits.org/en/v1.0.0/) **naming convention**; follow the link for more information.

Please request a review from at least one of the other maintainers. If you are not sure whom to ask, request a review from the SelfPrivacy/Devs team.

## Helpful links!

**SelfPrivacy contributor chat :3**

- [**Telegram:** @selfprivacy_dev](https://t.me/selfprivacy_dev)
- [**Matrix:** #dev:selfprivacy.org](https://matrix.to/#/#dev:selfprivacy.org)

**Helpful material to review:**

- [GraphQL query language documentation](https://graphql.org/)
- [Strawberry documentation - a Python library for working with GraphQL](https://strawberry.rocks/docs/)
- [Nix documentation](https://nixos.org/guides/ad-hoc-developer-environments.html)

### Track your time

If you are working on a task, please track your time and add it to the commit message. For example:

```
feat: add new feature

- did some work
- did some more work

fixes #4, spent @1h30m
```

[Timewarrior](https://timewarrior.net/) is a good tool for tracking time.

selfprivacy_api/actions/api_tokens.py

@@ -2,20 +2,19 @@
 from datetime import datetime
 from typing import Optional
 from pydantic import BaseModel
+from mnemonic import Mnemonic

-from selfprivacy_api.utils.auth import (
-    delete_token,
-    generate_recovery_token,
-    get_recovery_token_status,
-    get_tokens_info,
-    is_recovery_token_exists,
-    is_recovery_token_valid,
-    is_token_name_exists,
-    is_token_name_pair_valid,
-    refresh_token,
-    get_token_name,
+from selfprivacy_api.repositories.tokens.json_tokens_repository import (
+    JsonTokensRepository,
 )
+from selfprivacy_api.repositories.tokens.exceptions import (
+    TokenNotFound,
+    RecoveryKeyNotFound,
+    InvalidMnemonic,
+    NewDeviceKeyNotFound,
+)
+
+TOKEN_REPO = JsonTokensRepository()


 class TokenInfoWithIsCaller(BaseModel):
@@ -28,18 +27,23 @@ class TokenInfoWithIsCaller(BaseModel):
 def get_api_tokens_with_caller_flag(caller_token: str) -> list[TokenInfoWithIsCaller]:
     """Get the tokens info"""
-    caller_name = get_token_name(caller_token)
-    tokens = get_tokens_info()
+    caller_name = TOKEN_REPO.get_token_by_token_string(caller_token).device_name
+    tokens = TOKEN_REPO.get_tokens()
     return [
         TokenInfoWithIsCaller(
-            name=token.name,
-            date=token.date,
-            is_caller=token.name == caller_name,
+            name=token.device_name,
+            date=token.created_at,
+            is_caller=token.device_name == caller_name,
         )
         for token in tokens
     ]
+
+
+def is_token_valid(token) -> bool:
+    """Check if token is valid"""
+    return TOKEN_REPO.is_token_valid(token)


 class NotFoundException(Exception):
     """Not found exception"""
@@ -50,19 +54,22 @@ class CannotDeleteCallerException(Exception):
 def delete_api_token(caller_token: str, token_name: str) -> None:
     """Delete the token"""
-    if is_token_name_pair_valid(token_name, caller_token):
+    if TOKEN_REPO.is_token_name_pair_valid(token_name, caller_token):
         raise CannotDeleteCallerException("Cannot delete caller's token")
-    if not is_token_name_exists(token_name):
+    if not TOKEN_REPO.is_token_name_exists(token_name):
         raise NotFoundException("Token not found")
-    delete_token(token_name)
+    token = TOKEN_REPO.get_token_by_name(token_name)
+    TOKEN_REPO.delete_token(token)


 def refresh_api_token(caller_token: str) -> str:
     """Refresh the token"""
-    new_token = refresh_token(caller_token)
-    if new_token is None:
+    try:
+        old_token = TOKEN_REPO.get_token_by_token_string(caller_token)
+        new_token = TOKEN_REPO.refresh_token(old_token)
+    except TokenNotFound:
         raise NotFoundException("Token not found")
-    return new_token
+    return new_token.token


 class RecoveryTokenStatus(BaseModel):
@@ -77,18 +84,16 @@ class RecoveryTokenStatus(BaseModel):
 def get_api_recovery_token_status() -> RecoveryTokenStatus:
     """Get the recovery token status"""
-    if not is_recovery_token_exists():
+    token = TOKEN_REPO.get_recovery_key()
+    if token is None:
         return RecoveryTokenStatus(exists=False, valid=False)
-    status = get_recovery_token_status()
-    if status is None:
-        return RecoveryTokenStatus(exists=False, valid=False)
-    is_valid = is_recovery_token_valid()
+    is_valid = TOKEN_REPO.is_recovery_key_valid()
     return RecoveryTokenStatus(
         exists=True,
         valid=is_valid,
-        date=status["date"],
-        expiration=status["expiration"],
-        uses_left=status["uses_left"],
+        date=token.created_at,
+        expiration=token.expires_at,
+        uses_left=token.uses_left,
     )
@@ -112,5 +117,46 @@ def get_new_api_recovery_key(
     if uses_left <= 0:
         raise InvalidUsesLeft("Uses must be greater than 0")
-    key = generate_recovery_token(expiration_date, uses_left)
-    return key
+    key = TOKEN_REPO.create_recovery_key(expiration_date, uses_left)
+    mnemonic_phrase = Mnemonic(language="english").to_mnemonic(bytes.fromhex(key.key))
+    return mnemonic_phrase
+
+
+def use_mnemonic_recovery_token(mnemonic_phrase, name):
+    """Use the recovery token by converting the mnemonic word list to a byte array.
+    If the recovery token itself is invalid, return None.
+    If the binary representation of the phrase does not match
+    the byte array of the recovery token, return None.
+    If the mnemonic phrase is valid, generate a device token and return it.
+    Subtract 1 from uses_left if it exists.
+    mnemonic_phrase is a string representation of the mnemonic word list.
+    """
+    try:
+        token = TOKEN_REPO.use_mnemonic_recovery_key(mnemonic_phrase, name)
+        return token.token
+    except (RecoveryKeyNotFound, InvalidMnemonic):
+        return None
+
+
+def delete_new_device_auth_token() -> None:
+    TOKEN_REPO.delete_new_device_key()
+
+
+def get_new_device_auth_token() -> str:
+    """Generate and store a new device auth token which is valid for 10 minutes
+    and return a mnemonic phrase representation
+    """
+    key = TOKEN_REPO.get_new_device_key()
+    return Mnemonic(language="english").to_mnemonic(bytes.fromhex(key.key))
+
+
+def use_new_device_auth_token(mnemonic_phrase, name) -> Optional[str]:
+    """Use the new device auth token by converting the mnemonic string to a byte array.
+    If the mnemonic phrase is valid, generate a device token and return it.
+    The new device auth token is deleted after use.
+    """
+    try:
+        token = TOKEN_REPO.use_mnemonic_new_device_key(mnemonic_phrase, name)
+        return token.token
+    except (NewDeviceKeyNotFound, InvalidMnemonic):
+        return None
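Taken together, the new-device helpers above implement a pairing handshake: an authorized device mints a short-lived key, and the new device redeems its mnemonic representation for its own API token. An illustrative sketch of that flow (not part of this commit; it assumes a configured tokens repository behind `TOKEN_REPO`):

```python
# Sketch of the device-pairing flow using the helpers defined above.
from selfprivacy_api.actions.api_tokens import (
    get_new_device_auth_token,
    use_new_device_auth_token,
)

phrase = get_new_device_auth_token()  # mnemonic form of a key valid for 10 minutes
token = use_new_device_auth_token(phrase, "my-new-laptop")
if token is None:
    print("Pairing failed: key missing, expired, or phrase invalid")
else:
    print(f"New device token: {token}")  # the key is deleted after successful use
```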

selfprivacy_api/dependencies.py

@@ -2,7 +2,7 @@ from fastapi import Depends, HTTPException, status
 from fastapi.security import APIKeyHeader
 from pydantic import BaseModel

-from selfprivacy_api.utils.auth import is_token_valid
+from selfprivacy_api.actions.api_tokens import is_token_valid


 class TokenHeader(BaseModel):
@@ -27,4 +27,4 @@ async def get_token_header(

 def get_api_version() -> str:
     """Get API version"""
-    return "2.0.9"
+    return "2.1.2"

selfprivacy_api/graphql/__init__.py

@@ -4,7 +4,7 @@ import typing
 from strawberry.permission import BasePermission
 from strawberry.types import Info

-from selfprivacy_api.utils.auth import is_token_valid
+from selfprivacy_api.actions.api_tokens import is_token_valid


 class IsAuthenticated(BasePermission):

@@ -43,7 +43,7 @@ def job_to_api_job(job: Job) -> ApiJob:

 def get_api_job_by_id(job_id: str) -> typing.Optional[ApiJob]:
     """Get a job for GraphQL by its ID."""
-    job = Jobs.get_instance().get_job(job_id)
+    job = Jobs.get_job(job_id)
     if job is None:
         return None
     return job_to_api_job(job)

@@ -11,6 +11,11 @@ from selfprivacy_api.actions.api_tokens import (
     NotFoundException,
     delete_api_token,
     get_new_api_recovery_key,
+    use_mnemonic_recovery_token,
+    refresh_api_token,
+    delete_new_device_auth_token,
+    get_new_device_auth_token,
+    use_new_device_auth_token,
 )
 from selfprivacy_api.graphql import IsAuthenticated
 from selfprivacy_api.graphql.mutations.mutation_interface import (
@@ -18,14 +23,6 @@ from selfprivacy_api.graphql.mutations.mutation_interface import (
     MutationReturnInterface,
 )
-from selfprivacy_api.utils.auth import (
-    delete_new_device_auth_token,
-    get_new_device_auth_token,
-    refresh_token,
-    use_mnemonic_recoverery_token,
-    use_new_device_auth_token,
-)


 @strawberry.type
 class ApiKeyMutationReturn(MutationReturnInterface):
@@ -98,50 +95,53 @@ class ApiMutations:
         self, input: UseRecoveryKeyInput
     ) -> DeviceApiTokenMutationReturn:
         """Use recovery key"""
-        token = use_mnemonic_recoverery_token(input.key, input.deviceName)
-        if token is None:
+        token = use_mnemonic_recovery_token(input.key, input.deviceName)
+        if token is not None:
+            return DeviceApiTokenMutationReturn(
+                success=True,
+                message="Recovery key used",
+                code=200,
+                token=token,
+            )
+        else:
             return DeviceApiTokenMutationReturn(
                 success=False,
                 message="Recovery key not found",
                 code=404,
                 token=None,
             )
-        return DeviceApiTokenMutationReturn(
-            success=True,
-            message="Recovery key used",
-            code=200,
-            token=token,
-        )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def refresh_device_api_token(self, info: Info) -> DeviceApiTokenMutationReturn:
         """Refresh device api token"""
-        token = (
+        token_string = (
             info.context["request"]
             .headers.get("Authorization", "")
             .replace("Bearer ", "")
         )
-        if token is None:
+        if token_string is None:
             return DeviceApiTokenMutationReturn(
                 success=False,
                 message="Token not found",
                 code=404,
                 token=None,
             )
-        new_token = refresh_token(token)
-        if new_token is None:
+        try:
+            new_token = refresh_api_token(token_string)
+            return DeviceApiTokenMutationReturn(
+                success=True,
+                message="Token refreshed",
+                code=200,
+                token=new_token,
+            )
+        except NotFoundException:
             return DeviceApiTokenMutationReturn(
                 success=False,
                 message="Token not found",
                 code=404,
                 token=None,
             )
-        return DeviceApiTokenMutationReturn(
-            success=True,
-            message="Token refreshed",
-            code=200,
-            token=new_token,
-        )

     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def delete_device_api_token(self, device: str, info: Info) -> GenericMutationReturn:

@@ -14,7 +14,7 @@ class JobMutations:
     @strawberry.mutation(permission_classes=[IsAuthenticated])
     def remove_job(self, job_id: str) -> GenericMutationReturn:
         """Remove a job from the queue"""
-        result = Jobs.get_instance().remove_by_uid(job_id)
+        result = Jobs.remove_by_uid(job_id)
         if result:
             return GenericMutationReturn(
                 success=True,

@@ -4,16 +4,12 @@ import datetime
 import typing
 import strawberry
 from strawberry.types import Info
-from selfprivacy_api.actions.api_tokens import get_api_tokens_with_caller_flag
-from selfprivacy_api.graphql import IsAuthenticated
-from selfprivacy_api.utils import parse_date
-from selfprivacy_api.dependencies import get_api_version as get_api_version_dependency
-from selfprivacy_api.utils.auth import (
-    get_recovery_token_status,
-    is_recovery_token_exists,
-    is_recovery_token_valid,
+from selfprivacy_api.actions.api_tokens import (
+    get_api_tokens_with_caller_flag,
+    get_api_recovery_token_status,
 )
+from selfprivacy_api.graphql import IsAuthenticated
+from selfprivacy_api.dependencies import get_api_version as get_api_version_dependency


 def get_api_version() -> str:
@@ -43,16 +39,8 @@ class ApiRecoveryKeyStatus:

 def get_recovery_key_status() -> ApiRecoveryKeyStatus:
     """Get recovery key status"""
-    if not is_recovery_token_exists():
-        return ApiRecoveryKeyStatus(
-            exists=False,
-            valid=False,
-            creation_date=None,
-            expiration_date=None,
-            uses_left=None,
-        )
-    status = get_recovery_token_status()
-    if status is None:
+    status = get_api_recovery_token_status()
+    if status is None or not status.exists:
         return ApiRecoveryKeyStatus(
             exists=False,
             valid=False,
@@ -62,12 +50,10 @@ def get_recovery_key_status() -> ApiRecoveryKeyStatus:
         )
     return ApiRecoveryKeyStatus(
         exists=True,
-        valid=is_recovery_token_valid(),
-        creation_date=parse_date(status["date"]),
-        expiration_date=parse_date(status["expiration"])
-        if status["expiration"] is not None
-        else None,
-        uses_left=status["uses_left"] if status["uses_left"] is not None else None,
+        valid=status.valid,
+        creation_date=status.date,
+        expiration_date=status.expiration,
+        uses_left=status.uses_left,
     )

@@ -16,9 +16,9 @@ class Job:
     @strawberry.field
     def get_jobs(self) -> typing.List[ApiJob]:
-        Jobs.get_instance().get_jobs()
-        return [job_to_api_job(job) for job in Jobs.get_instance().get_jobs()]
+        Jobs.get_jobs()
+        return [job_to_api_job(job) for job in Jobs.get_jobs()]

     @strawberry.field
     def get_job(self, job_id: str) -> typing.Optional[ApiJob]:

@@ -6,8 +6,15 @@ import strawberry
 @strawberry.enum
 class DnsProvider(Enum):
     CLOUDFLARE = "CLOUDFLARE"
+    DIGITALOCEAN = "DIGITALOCEAN"


 @strawberry.enum
 class ServerProvider(Enum):
     HETZNER = "HETZNER"
+    DIGITALOCEAN = "DIGITALOCEAN"
+
+
+@strawberry.enum
+class BackupProvider(Enum):
+    BACKBLAZE = "BACKBLAZE"

@@ -44,7 +44,7 @@ def get_system_domain_info() -> SystemDomainInfo:
     return SystemDomainInfo(
         domain=user_data["domain"],
         hostname=user_data["hostname"],
-        provider=DnsProvider.CLOUDFLARE,
+        provider=user_data["dns"]["provider"],
     )
@@ -133,7 +133,11 @@ class SystemProviderInfo:

 def get_system_provider_info() -> SystemProviderInfo:
     """Get system provider info"""
-    return SystemProviderInfo(provider=ServerProvider.HETZNER, id="UNKNOWN")
+    with ReadUserData() as user_data:
+        return SystemProviderInfo(
+            provider=user_data["server"]["provider"],
+            id="UNKNOWN",
+        )


 @strawberry.type

selfprivacy_api/jobs/__init__.py

@@ -17,16 +17,14 @@ A job is a dictionary with the following keys:
 import typing
 import datetime
 from uuid import UUID
-import asyncio
-import json
-import os
-import time
 import uuid
 from enum import Enum

 from pydantic import BaseModel

-from selfprivacy_api.utils import ReadUserData, UserDataFiles, WriteUserData
+from selfprivacy_api.utils.redis_pool import RedisPool
+
+JOB_EXPIRATION_SECONDS = 10 * 24 * 60 * 60  # ten days


 class JobStatus(Enum):
@@ -64,36 +62,14 @@ class Jobs:
     Jobs class.
     """

-    __instance = None
-
-    @staticmethod
-    def get_instance():
-        """
-        Singleton method.
-        """
-        if Jobs.__instance is None:
-            Jobs()
-            if Jobs.__instance is None:
-                raise Exception("Couldn't init Jobs singleton!")
-            return Jobs.__instance
-        return Jobs.__instance
-
-    def __init__(self):
-        """
-        Initialize the jobs list.
-        """
-        if Jobs.__instance is not None:
-            raise Exception("This class is a singleton!")
-        else:
-            Jobs.__instance = self
-
     @staticmethod
     def reset() -> None:
         """
         Reset the jobs list.
         """
-        with WriteUserData(UserDataFiles.JOBS) as user_data:
-            user_data["jobs"] = []
+        jobs = Jobs.get_jobs()
+        for job in jobs:
+            Jobs.remove(job)

     @staticmethod
     def add(
@@ -121,32 +97,27 @@ class Jobs:
             error=None,
             result=None,
         )
-        with WriteUserData(UserDataFiles.JOBS) as user_data:
-            try:
-                if "jobs" not in user_data:
-                    user_data["jobs"] = []
-                user_data["jobs"].append(json.loads(job.json()))
-            except json.decoder.JSONDecodeError:
-                user_data["jobs"] = [json.loads(job.json())]
+        redis = RedisPool().get_connection()
+        _store_job_as_hash(redis, _redis_key_from_uuid(job.uid), job)
         return job

-    def remove(self, job: Job) -> None:
+    @staticmethod
+    def remove(job: Job) -> None:
         """
         Remove a job from the jobs list.
         """
-        self.remove_by_uid(str(job.uid))
+        Jobs.remove_by_uid(str(job.uid))

-    def remove_by_uid(self, job_uuid: str) -> bool:
+    @staticmethod
+    def remove_by_uid(job_uuid: str) -> bool:
         """
         Remove a job from the jobs list.
         """
-        with WriteUserData(UserDataFiles.JOBS) as user_data:
-            if "jobs" not in user_data:
-                user_data["jobs"] = []
-            for i, j in enumerate(user_data["jobs"]):
-                if j["uid"] == job_uuid:
-                    del user_data["jobs"][i]
-                    return True
+        redis = RedisPool().get_connection()
+        key = _redis_key_from_uuid(job_uuid)
+        if redis.exists(key):
+            redis.delete(key)
+            return True
         return False

     @staticmethod
@@ -178,13 +149,12 @@ class Jobs:
         if status in (JobStatus.FINISHED, JobStatus.ERROR):
             job.finished_at = datetime.datetime.now()

-        with WriteUserData(UserDataFiles.JOBS) as user_data:
-            if "jobs" not in user_data:
-                user_data["jobs"] = []
-            for i, j in enumerate(user_data["jobs"]):
-                if j["uid"] == str(job.uid):
-                    user_data["jobs"][i] = json.loads(job.json())
-                    break
+        redis = RedisPool().get_connection()
+        key = _redis_key_from_uuid(job.uid)
+        if redis.exists(key):
+            _store_job_as_hash(redis, key, job)
+            if status in (JobStatus.FINISHED, JobStatus.ERROR):
+                redis.expire(key, JOB_EXPIRATION_SECONDS)

         return job
@@ -193,12 +163,10 @@ class Jobs:
         """
         Get a job from the jobs list.
         """
-        with ReadUserData(UserDataFiles.JOBS) as user_data:
-            if "jobs" not in user_data:
-                user_data["jobs"] = []
-            for job in user_data["jobs"]:
-                if job["uid"] == uid:
-                    return Job(**job)
+        redis = RedisPool().get_connection()
+        key = _redis_key_from_uuid(uid)
+        if redis.exists(key):
+            return _job_from_hash(redis, key)
         return None

     @staticmethod
@@ -206,23 +174,54 @@ class Jobs:
         """
         Get the jobs list.
         """
-        with ReadUserData(UserDataFiles.JOBS) as user_data:
-            try:
-                if "jobs" not in user_data:
-                    user_data["jobs"] = []
-                return [Job(**job) for job in user_data["jobs"]]
-            except json.decoder.JSONDecodeError:
-                return []
+        redis = RedisPool().get_connection()
+        job_keys = redis.keys("jobs:*")
+        jobs = []
+        for job_key in job_keys:
+            job = _job_from_hash(redis, job_key)
+            if job is not None:
+                jobs.append(job)
+        return jobs

     @staticmethod
     def is_busy() -> bool:
         """
         Check if there is a job running.
         """
-        with ReadUserData(UserDataFiles.JOBS) as user_data:
-            if "jobs" not in user_data:
-                user_data["jobs"] = []
-            for job in user_data["jobs"]:
-                if job["status"] == JobStatus.RUNNING.value:
-                    return True
+        for job in Jobs.get_jobs():
+            if job.status == JobStatus.RUNNING:
+                return True
         return False
+
+
+def _redis_key_from_uuid(uuid_string):
+    return "jobs:" + str(uuid_string)
+
+
+def _store_job_as_hash(redis, redis_key, model):
+    for key, value in model.dict().items():
+        if isinstance(value, uuid.UUID):
+            value = str(value)
+        if isinstance(value, datetime.datetime):
+            value = value.isoformat()
+        if isinstance(value, JobStatus):
+            value = value.value
+        redis.hset(redis_key, key, str(value))
+
+
+def _job_from_hash(redis, redis_key):
+    if redis.exists(redis_key):
+        job_dict = redis.hgetall(redis_key)
+        for date in [
+            "created_at",
+            "updated_at",
+            "finished_at",
+        ]:
+            if job_dict[date] != "None":
+                job_dict[date] = datetime.datetime.fromisoformat(job_dict[date])
+        for key in job_dict.keys():
+            if job_dict[key] == "None":
+                job_dict[key] = None
+
+        return Job(**job_dict)
+    return None
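For orientation, a sketch of the resulting job lifecycle: each job now lives as a Redis hash under `jobs:<uid>` instead of an entry in a JSON file. This is not part of the commit; it assumes a reachable Redis behind `RedisPool` and that the remaining `Jobs.add` fields default sensibly, as in the test task below:

```python
# Illustrative round trip through the Redis-backed Jobs API above.
from selfprivacy_api.jobs import Jobs, JobStatus

job = Jobs.add(
    type_id="example.demo",
    name="Demo job",
    description="Shows the Redis-backed job lifecycle.",
)
Jobs.update(job=job, status=JobStatus.RUNNING, status_text="Working...", progress=50)

fetched = Jobs.get_job(str(job.uid))  # reads the "jobs:<uid>" hash back into a Job
assert fetched is not None and fetched.status == JobStatus.RUNNING

Jobs.update(job=job, status=JobStatus.FINISHED)  # also sets the ten-day TTL
```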

@@ -5,7 +5,7 @@ from selfprivacy_api.jobs import JobStatus, Jobs

 @huey.task()
 def test_job():
-    job = Jobs.get_instance().add(
+    job = Jobs.add(
         type_id="test",
         name="Test job",
         description="This is a test job.",
@@ -14,42 +14,42 @@ def test_job():
         progress=0,
     )
     time.sleep(5)
-    Jobs.get_instance().update(
+    Jobs.update(
         job=job,
         status=JobStatus.RUNNING,
         status_text="Performing pre-move checks...",
         progress=5,
     )
     time.sleep(5)
-    Jobs.get_instance().update(
+    Jobs.update(
         job=job,
         status=JobStatus.RUNNING,
         status_text="Performing pre-move checks...",
         progress=10,
     )
     time.sleep(5)
-    Jobs.get_instance().update(
+    Jobs.update(
         job=job,
         status=JobStatus.RUNNING,
         status_text="Performing pre-move checks...",
         progress=15,
     )
     time.sleep(5)
-    Jobs.get_instance().update(
+    Jobs.update(
         job=job,
         status=JobStatus.RUNNING,
         status_text="Performing pre-move checks...",
         progress=20,
     )
     time.sleep(5)
-    Jobs.get_instance().update(
+    Jobs.update(
         job=job,
         status=JobStatus.RUNNING,
         status_text="Performing pre-move checks...",
         progress=25,
     )
     time.sleep(5)
-    Jobs.get_instance().update(
+    Jobs.update(
         job=job,
         status=JobStatus.FINISHED,
         status_text="Job finished.",

@@ -18,6 +18,10 @@ from selfprivacy_api.migrations.migrate_to_selfprivacy_channel import (
     MigrateToSelfprivacyChannel,
 )
 from selfprivacy_api.migrations.mount_volume import MountVolume
+from selfprivacy_api.migrations.providers import CreateProviderFields
+from selfprivacy_api.migrations.prepare_for_nixos_2211 import (
+    MigrateToSelfprivacyChannelFrom2205,
+)

 migrations = [
     FixNixosConfigBranch(),
@@ -25,6 +29,8 @@ migrations = [
     MigrateToSelfprivacyChannel(),
     MountVolume(),
     CheckForFailedBindsMigration(),
+    CreateProviderFields(),
+    MigrateToSelfprivacyChannelFrom2205(),
 ]

@@ -15,7 +15,7 @@ class CheckForFailedBindsMigration(Migration):
     def is_migration_needed(self):
         try:
-            jobs = Jobs.get_instance().get_jobs()
+            jobs = Jobs.get_jobs()
             # If there is a job with type_id "migrations.migrate_to_binds" and status is not "FINISHED",
             # then migration is needed and job is deleted
             for job in jobs:
@@ -33,13 +33,13 @@ class CheckForFailedBindsMigration(Migration):
         # Get info about existing volumes
         # Write info about volumes to userdata.json
         try:
-            jobs = Jobs.get_instance().get_jobs()
+            jobs = Jobs.get_jobs()
             for job in jobs:
                 if (
                     job.type_id == "migrations.migrate_to_binds"
                     and job.status != JobStatus.FINISHED
                 ):
-                    Jobs.get_instance().remove(job)
+                    Jobs.remove(job)
             with WriteUserData() as userdata:
                 userdata["useBinds"] = False
             print("Done")

selfprivacy_api/migrations/prepare_for_nixos_2211.py Normal file

@@ -0,0 +1,58 @@
import os
import subprocess

from selfprivacy_api.migrations.migration import Migration


class MigrateToSelfprivacyChannelFrom2205(Migration):
    """Migrate to the selfprivacy Nix channel.
    For some reason, NixOS 22.05 servers were initialized with the nixos channel
    instead of selfprivacy. This stops us from upgrading to NixOS 22.11.
    """

    def get_migration_name(self):
        return "migrate_to_selfprivacy_channel_from_2205"

    def get_migration_description(self):
        return "Migrate to selfprivacy Nix channel from NixOS 22.05."

    def is_migration_needed(self):
        try:
            output = subprocess.check_output(
                ["nix-channel", "--list"], start_new_session=True
            )
            output = output.decode("utf-8")
            first_line = output.split("\n", maxsplit=1)[0]
            return first_line.startswith("nixos") and (
                first_line.endswith("nixos-22.05")
            )
        except subprocess.CalledProcessError:
            return False

    def migrate(self):
        # Change the channel and update it.
        # Also, go to the /etc/nixos directory and do a git pull.
        current_working_directory = os.getcwd()
        try:
            print("Changing channel")
            os.chdir("/etc/nixos")
            subprocess.check_output(
                [
                    "nix-channel",
                    "--add",
                    "https://channel.selfprivacy.org/nixos-selfpricacy",
                    "nixos",
                ]
            )
            subprocess.check_output(["nix-channel", "--update"])
            nixos_config_branch = subprocess.check_output(
                ["git", "rev-parse", "--abbrev-ref", "HEAD"], start_new_session=True
            )
            if nixos_config_branch.decode("utf-8").strip() == "api-redis":
                print("Also changing nixos-config branch from api-redis to master")
                subprocess.check_output(["git", "checkout", "master"])
            subprocess.check_output(["git", "pull"])
            os.chdir(current_working_directory)
        except subprocess.CalledProcessError:
            os.chdir(current_working_directory)
            print("Error")

selfprivacy_api/migrations/providers.py Normal file

@@ -0,0 +1,43 @@
from selfprivacy_api.migrations.migration import Migration
from selfprivacy_api.utils import ReadUserData, WriteUserData


class CreateProviderFields(Migration):
    """Unhardcode providers"""

    def get_migration_name(self):
        return "create_provider_fields"

    def get_migration_description(self):
        return "Add DNS, backup and server provider fields to enable user to choose between different clouds and to make the deployment adapt to these preferences."

    def is_migration_needed(self):
        try:
            with ReadUserData() as userdata:
                return "dns" not in userdata
        except Exception as e:
            print(e)
            return False

    def migrate(self):
        # Write info about providers to userdata.json
        try:
            with WriteUserData() as userdata:
                userdata["dns"] = {
                    "provider": "CLOUDFLARE",
                    "apiKey": userdata["cloudflare"]["apiKey"],
                }
                userdata["server"] = {
                    "provider": "HETZNER",
                }
                userdata["backup"] = {
                    "provider": "BACKBLAZE",
                    "accountId": userdata["backblaze"]["accountId"],
                    "accountKey": userdata["backblaze"]["accountKey"],
                    "bucket": userdata["backblaze"]["bucket"],
                }
            print("Done")
        except Exception as e:
            print(e)
            print("Error migrating provider fields")

selfprivacy_api/repositories/tokens/abstract_tokens_repository.py

@@ -1,55 +1,84 @@
 from abc import ABC, abstractmethod
 from datetime import datetime
 from typing import Optional
+from mnemonic import Mnemonic
+from secrets import randbelow
+import re

 from selfprivacy_api.models.tokens.token import Token
+from selfprivacy_api.repositories.tokens.exceptions import (
+    TokenNotFound,
+    InvalidMnemonic,
+    RecoveryKeyNotFound,
+    NewDeviceKeyNotFound,
+)
 from selfprivacy_api.models.tokens.recovery_key import RecoveryKey
 from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey


 class AbstractTokensRepository(ABC):
-    @abstractmethod
-    def get_token_by_token_string(self, token_string: str) -> Optional[Token]:
+    def get_token_by_token_string(self, token_string: str) -> Token:
         """Get the token by token"""
+        tokens = self.get_tokens()
+        for token in tokens:
+            if token.token == token_string:
+                return token
+
+        raise TokenNotFound("Token not found!")

-    @abstractmethod
-    def get_token_by_name(self, token_name: str) -> Optional[Token]:
+    def get_token_by_name(self, token_name: str) -> Token:
         """Get the token by name"""
+        tokens = self.get_tokens()
+        for token in tokens:
+            if token.device_name == token_name:
+                return token
+
+        raise TokenNotFound("Token not found!")

     @abstractmethod
     def get_tokens(self) -> list[Token]:
         """Get the tokens"""

-    @abstractmethod
     def create_token(self, device_name: str) -> Token:
         """Create new token"""
+        unique_name = self._make_unique_device_name(device_name)
+        new_token = Token.generate(unique_name)
+
+        self._store_token(new_token)
+
+        return new_token

     @abstractmethod
     def delete_token(self, input_token: Token) -> None:
         """Delete the token"""

-    @abstractmethod
     def refresh_token(self, input_token: Token) -> Token:
-        """Refresh the token"""
+        """Change the token field of the existing token"""
+        new_token = Token.generate(device_name=input_token.device_name)
+        new_token.created_at = input_token.created_at
+
+        if input_token in self.get_tokens():
+            self.delete_token(input_token)
+            self._store_token(new_token)
+            return new_token
+
+        raise TokenNotFound("Token not found!")

     def is_token_valid(self, token_string: str) -> bool:
         """Check if the token is valid"""
-        token = self.get_token_by_token_string(token_string)
-        if token is None:
-            return False
-        return True
+        return token_string in [token.token for token in self.get_tokens()]

     def is_token_name_exists(self, token_name: str) -> bool:
         """Check if the token name exists"""
-        token = self.get_token_by_name(token_name)
-        if token is None:
-            return False
-        return True
+        return token_name in [token.device_name for token in self.get_tokens()]

     def is_token_name_pair_valid(self, token_name: str, token_string: str) -> bool:
         """Check if the token name and token are valid"""
-        token = self.get_token_by_name(token_name)
-        if token is None:
+        try:
+            token = self.get_token_by_name(token_name)
+            if token is None:
+                return False
+        except TokenNotFound:
             return False
         return token.token == token_string
@@ -65,11 +94,27 @@ class AbstractTokensRepository(ABC):
     ) -> RecoveryKey:
         """Create the recovery key"""

-    @abstractmethod
     def use_mnemonic_recovery_key(
         self, mnemonic_phrase: str, device_name: str
     ) -> Token:
         """Use the mnemonic recovery key and create a new token with the given name"""
+        if not self.is_recovery_key_valid():
+            raise RecoveryKeyNotFound("Recovery key not found")
+
+        recovery_key = self.get_recovery_key()
+
+        if recovery_key is None:
+            raise RecoveryKeyNotFound("Recovery key not found")
+
+        recovery_hex_key = recovery_key.key
+        if not self._assert_mnemonic(recovery_hex_key, mnemonic_phrase):
+            raise RecoveryKeyNotFound("Recovery key not found")
+
+        new_token = self.create_token(device_name=device_name)
+
+        self._decrement_recovery_token()
+
+        return new_token

     def is_recovery_key_valid(self) -> bool:
         """Check if the recovery key is valid"""
@@ -78,16 +123,71 @@ class AbstractTokensRepository(ABC):
             return False
         return recovery_key.is_valid()

-    @abstractmethod
     def get_new_device_key(self) -> NewDeviceKey:
         """Creates and returns the new device key"""
+        new_device_key = NewDeviceKey.generate()
+        self._store_new_device_key(new_device_key)
+
+        return new_device_key
+
+    def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None:
+        """Store new device key directly"""

     @abstractmethod
     def delete_new_device_key(self) -> None:
         """Delete the new device key"""

-    @abstractmethod
     def use_mnemonic_new_device_key(
         self, mnemonic_phrase: str, device_name: str
     ) -> Token:
         """Use the mnemonic new device key"""
+        new_device_key = self._get_stored_new_device_key()
+        if not new_device_key:
+            raise NewDeviceKeyNotFound
+
+        if not new_device_key.is_valid():
+            raise NewDeviceKeyNotFound
+
+        if not self._assert_mnemonic(new_device_key.key, mnemonic_phrase):
+            raise NewDeviceKeyNotFound("Phrase is not token!")
+
+        new_token = self.create_token(device_name=device_name)
+        self.delete_new_device_key()
+
+        return new_token
+
+    @abstractmethod
+    def _store_token(self, new_token: Token):
+        """Store a token directly"""
+
+    @abstractmethod
+    def _decrement_recovery_token(self):
+        """Decrement recovery key use count by one"""
+
+    @abstractmethod
+    def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]:
+        """Retrieves new device key that is already stored."""
+
+    def _make_unique_device_name(self, name: str) -> str:
+        """Token name must be an alphanumeric string and not empty.
+        Replace invalid characters with '_'.
+        If the name exists, add a random number to the end of the name until it is unique.
+        """
+        if not re.match("^[a-zA-Z0-9]*$", name):
+            name = re.sub("[^a-zA-Z0-9]", "_", name)
+        if name == "":
+            name = "Unknown device"
+        while self.is_token_name_exists(name):
+            name += str(randbelow(10))
+        return name
+
+    # TODO: find a proper place for it
+    def _assert_mnemonic(self, hex_key: str, mnemonic_phrase: str):
+        """Return true if the hex string matches the phrase, false otherwise.
+        Raise an InvalidMnemonic error if the phrase is not a mnemonic."""
+        recovery_token = bytes.fromhex(hex_key)
+        if not Mnemonic(language="english").check(mnemonic_phrase):
+            raise InvalidMnemonic("Phrase is not mnemonic!")
+
+        phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase)
+        return phrase_bytes == recovery_token
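The recovery-key and new-device flows above hinge on the hex-key/mnemonic equivalence checked in `_assert_mnemonic`. A short round trip using the same python-mnemonic calls the repository uses:

```python
# Round trip between a hex key and its BIP-39 phrase, with the same
# Mnemonic calls as _assert_mnemonic above. The all-zero key is a
# well-known BIP-39 test vector, not a realistic secret.
from mnemonic import Mnemonic

mnemo = Mnemonic(language="english")

hex_key = "00" * 16  # 128-bit key; a real key would be random
phrase = mnemo.to_mnemonic(bytes.fromhex(hex_key))
print(phrase)  # "abandon abandon ... about"

assert mnemo.check(phrase)  # valid BIP-39 phrase
assert mnemo.to_entropy(phrase) == bytes.fromhex(hex_key)  # entropy matches the key
```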

selfprivacy_api/repositories/tokens/json_tokens_repository.py

@@ -3,7 +3,6 @@ temporary legacy
 """
 from typing import Optional
 from datetime import datetime
-from mnemonic import Mnemonic

 from selfprivacy_api.utils import UserDataFiles, WriteUserData, ReadUserData
 from selfprivacy_api.models.tokens.token import Token
@@ -11,9 +10,6 @@ from selfprivacy_api.models.tokens.recovery_key import RecoveryKey
 from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey
 from selfprivacy_api.repositories.tokens.exceptions import (
     TokenNotFound,
-    RecoveryKeyNotFound,
-    InvalidMnemonic,
-    NewDeviceKeyNotFound,
 )
 from selfprivacy_api.repositories.tokens.abstract_tokens_repository import (
     AbstractTokensRepository,
@@ -23,34 +19,6 @@ DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"

 class JsonTokensRepository(AbstractTokensRepository):
-    def get_token_by_token_string(self, token_string: str) -> Optional[Token]:
-        """Get the token by token"""
-        with ReadUserData(UserDataFiles.TOKENS) as tokens_file:
-            for userdata_token in tokens_file["tokens"]:
-                if userdata_token["token"] == token_string:
-                    return Token(
-                        token=token_string,
-                        device_name=userdata_token["name"],
-                        created_at=userdata_token["date"],
-                    )
-
-        raise TokenNotFound("Token not found!")
-
-    def get_token_by_name(self, token_name: str) -> Optional[Token]:
-        """Get the token by name"""
-        with ReadUserData(UserDataFiles.TOKENS) as tokens_file:
-            for userdata_token in tokens_file["tokens"]:
-                if userdata_token["name"] == token_name:
-                    return Token(
-                        token=userdata_token["token"],
-                        device_name=token_name,
-                        created_at=userdata_token["date"],
-                    )
-
-        raise TokenNotFound("Token not found!")
-
     def get_tokens(self) -> list[Token]:
         """Get the tokens"""
         tokens_list = []
@@ -67,10 +35,8 @@ class JsonTokensRepository(AbstractTokensRepository):
         return tokens_list

-    def create_token(self, device_name: str) -> Token:
-        """Create new token"""
-        new_token = Token.generate(device_name)
-
+    def _store_token(self, new_token: Token):
+        """Store a token directly"""
         with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
             tokens_file["tokens"].append(
                 {
@@ -79,7 +45,6 @@ class JsonTokensRepository(AbstractTokensRepository):
                     "date": new_token.created_at.strftime(DATETIME_FORMAT),
                 }
             )
-        return new_token

     def delete_token(self, input_token: Token) -> None:
         """Delete the token"""
@@ -91,23 +56,6 @@ class JsonTokensRepository(AbstractTokensRepository):
             raise TokenNotFound("Token not found!")

-    def refresh_token(self, input_token: Token) -> Token:
-        """Change the token field of the existing token"""
-        new_token = Token.generate(device_name=input_token.device_name)
-
-        with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
-            for userdata_token in tokens_file["tokens"]:
-                if userdata_token["name"] == input_token.device_name:
-                    userdata_token["token"] = new_token.token
-                    userdata_token["date"] = (
-                        new_token.created_at.strftime(DATETIME_FORMAT),
-                    )
-
-                    return new_token
-
-        raise TokenNotFound("Token not found!")
-
     def get_recovery_key(self) -> Optional[RecoveryKey]:
         """Get the recovery key"""
         with ReadUserData(UserDataFiles.TOKENS) as tokens_file:
@@ -121,7 +69,7 @@ class JsonTokensRepository(AbstractTokensRepository):
             recovery_key = RecoveryKey(
                 key=tokens_file["recovery_token"].get("token"),
                 created_at=tokens_file["recovery_token"].get("date"),
-                expires_at=tokens_file["recovery_token"].get("expitation"),
+                expires_at=tokens_file["recovery_token"].get("expiration"),
                 uses_left=tokens_file["recovery_token"].get("uses_left"),
             )
@@ -137,59 +85,26 @@ class JsonTokensRepository(AbstractTokensRepository):
         recovery_key = RecoveryKey.generate(expiration, uses_left)

         with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
+            key_expiration: Optional[str] = None
+            if recovery_key.expires_at is not None:
+                key_expiration = recovery_key.expires_at.strftime(DATETIME_FORMAT)
             tokens_file["recovery_token"] = {
                 "token": recovery_key.key,
                 "date": recovery_key.created_at.strftime(DATETIME_FORMAT),
-                "expiration": recovery_key.expires_at,
+                "expiration": key_expiration,
                 "uses_left": recovery_key.uses_left,
             }

         return recovery_key

-    def use_mnemonic_recovery_key(
-        self, mnemonic_phrase: str, device_name: str
-    ) -> Token:
-        """Use the mnemonic recovery key and create a new token with the given name"""
-        recovery_key = self.get_recovery_key()
-
-        if recovery_key is None:
-            raise RecoveryKeyNotFound("Recovery key not found")
-
-        if not recovery_key.is_valid():
-            raise RecoveryKeyNotFound("Recovery key not found")
-
-        recovery_token = bytes.fromhex(recovery_key.key)
-
-        if not Mnemonic(language="english").check(mnemonic_phrase):
-            raise InvalidMnemonic("Phrase is not mnemonic!")
-
-        phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase)
-        if phrase_bytes != recovery_token:
-            raise RecoveryKeyNotFound("Recovery key not found")
-
-        new_token = Token.generate(device_name=device_name)
-
-        with WriteUserData(UserDataFiles.TOKENS) as tokens:
-            tokens["tokens"].append(
-                {
-                    "token": new_token.token,
-                    "name": new_token.device_name,
-                    "date": new_token.created_at.strftime(DATETIME_FORMAT),
-                }
-            )
-
-            if "recovery_token" in tokens:
-                if (
-                    "uses_left" in tokens["recovery_token"]
-                    and tokens["recovery_token"]["uses_left"] is not None
-                ):
-                    tokens["recovery_token"]["uses_left"] -= 1
-
-        return new_token
-
-    def get_new_device_key(self) -> NewDeviceKey:
-        """Creates and returns the new device key"""
-        new_device_key = NewDeviceKey.generate()
-
+    def _decrement_recovery_token(self):
+        """Decrement recovery key use count by one"""
+        if self.is_recovery_key_valid():
+            with WriteUserData(UserDataFiles.TOKENS) as tokens:
+                if tokens["recovery_token"]["uses_left"] is not None:
+                    tokens["recovery_token"]["uses_left"] -= 1
+
+    def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None:
         with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
             tokens_file["new_device"] = {
                 "token": new_device_key.key,
@@ -197,8 +112,6 @@ class JsonTokensRepository(AbstractTokensRepository):
                 "expiration": new_device_key.expires_at.strftime(DATETIME_FORMAT),
             }

-        return new_device_key
-
     def delete_new_device_key(self) -> None:
         """Delete the new device key"""
         with WriteUserData(UserDataFiles.TOKENS) as tokens_file:
@@ -206,33 +119,15 @@ class JsonTokensRepository(AbstractTokensRepository):
             del tokens_file["new_device"]
             return

-    def use_mnemonic_new_device_key(
-        self, mnemonic_phrase: str, device_name: str
-    ) -> Token:
-        """Use the mnemonic new device key"""
+    def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]:
+        """Retrieves new device key that is already stored."""
         with ReadUserData(UserDataFiles.TOKENS) as tokens_file:
             if "new_device" not in tokens_file or tokens_file["new_device"] is None:
-                raise NewDeviceKeyNotFound("New device key not found")
+                return
             new_device_key = NewDeviceKey(
                 key=tokens_file["new_device"]["token"],
                 created_at=tokens_file["new_device"]["date"],
                 expires_at=tokens_file["new_device"]["expiration"],
             )
-
-            token = bytes.fromhex(new_device_key.key)
-
-            if not Mnemonic(language="english").check(mnemonic_phrase):
-                raise InvalidMnemonic("Phrase is not mnemonic!")
-
-            phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase)
-            if bytes(phrase_bytes) != bytes(token):
-                raise NewDeviceKeyNotFound("Phrase is not token!")
-
-            new_token = Token.generate(device_name=device_name)
-            with WriteUserData(UserDataFiles.TOKENS) as tokens:
-                if "new_device" in tokens:
-                    del tokens["new_device"]
-        return new_token
+        return new_device_key

@@ -1,9 +1,21 @@
 """
 Token repository using Redis as backend.
 """
+from typing import Optional
+from datetime import datetime
+
 from selfprivacy_api.repositories.tokens.abstract_tokens_repository import (
     AbstractTokensRepository,
 )
+from selfprivacy_api.utils.redis_pool import RedisPool
+from selfprivacy_api.models.tokens.token import Token
+from selfprivacy_api.models.tokens.recovery_key import RecoveryKey
+from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey
+from selfprivacy_api.repositories.tokens.exceptions import TokenNotFound
+
+TOKENS_PREFIX = "token_repo:tokens:"
+NEW_DEVICE_KEY_REDIS_KEY = "token_repo:new_device_key"
+RECOVERY_KEY_REDIS_KEY = "token_repo:recovery_key"


 class RedisTokensRepository(AbstractTokensRepository):
@@ -11,5 +23,132 @@ class RedisTokensRepository(AbstractTokensRepository):
     Token repository using Redis as a backend
     """

-    def __init__(self) -> None:
-        raise NotImplementedError
+    def __init__(self):
+        self.connection = RedisPool().get_connection()
+
+    @staticmethod
+    def token_key_for_device(device_name: str):
+        return TOKENS_PREFIX + str(hash(device_name))
+
+    def get_tokens(self) -> list[Token]:
+        """Get the tokens"""
+        redis = self.connection
+        token_keys = redis.keys(TOKENS_PREFIX + "*")
+        tokens = []
+        for key in token_keys:
+            token = self._token_from_hash(key)
+            if token is not None:
+                tokens.append(token)
+        return tokens
+
+    def delete_token(self, input_token: Token) -> None:
+        """Delete the token"""
+        redis = self.connection
+        key = RedisTokensRepository._token_redis_key(input_token)
+        if input_token not in self.get_tokens():
+            raise TokenNotFound
+        redis.delete(key)
+
+    def reset(self):
+        for token in self.get_tokens():
+            self.delete_token(token)
+        self.delete_new_device_key()
+        redis = self.connection
+        redis.delete(RECOVERY_KEY_REDIS_KEY)
+
+    def get_recovery_key(self) -> Optional[RecoveryKey]:
+        """Get the recovery key"""
+        redis = self.connection
+        if redis.exists(RECOVERY_KEY_REDIS_KEY):
+            return self._recovery_key_from_hash(RECOVERY_KEY_REDIS_KEY)
+        return None
+
+    def create_recovery_key(
+        self,
+        expiration: Optional[datetime],
+        uses_left: Optional[int],
+    ) -> RecoveryKey:
+        """Create the recovery key"""
+        recovery_key = RecoveryKey.generate(expiration=expiration, uses_left=uses_left)
+        self._store_model_as_hash(RECOVERY_KEY_REDIS_KEY, recovery_key)
+        return recovery_key
+
+    def _store_new_device_key(self, new_device_key: NewDeviceKey) -> None:
+        """Store new device key directly"""
+        self._store_model_as_hash(NEW_DEVICE_KEY_REDIS_KEY, new_device_key)
+
+    def delete_new_device_key(self) -> None:
+        """Delete the new device key"""
+        redis = self.connection
+        redis.delete(NEW_DEVICE_KEY_REDIS_KEY)
+
+    @staticmethod
+    def _token_redis_key(token: Token) -> str:
+        return RedisTokensRepository.token_key_for_device(token.device_name)
+
+    def _store_token(self, new_token: Token):
+        """Store a token directly"""
+        key = RedisTokensRepository._token_redis_key(new_token)
+        self._store_model_as_hash(key, new_token)
+
+    def _decrement_recovery_token(self):
+        """Decrement recovery key use count by one"""
+        if self.is_recovery_key_valid():
+            recovery_key = self.get_recovery_key()
+            if recovery_key is None:
+                return
+            uses_left = recovery_key.uses_left
+            if uses_left is not None:
+                redis = self.connection
+                redis.hset(RECOVERY_KEY_REDIS_KEY, "uses_left", uses_left - 1)
+
+    def _get_stored_new_device_key(self) -> Optional[NewDeviceKey]:
+        """Retrieves new device key that is already stored."""
+        return self._new_device_key_from_hash(NEW_DEVICE_KEY_REDIS_KEY)
+
+    @staticmethod
+    def _is_date_key(key: str):
+        return key in [
+            "created_at",
+            "expires_at",
+        ]
+
+    @staticmethod
+    def _prepare_model_dict(d: dict):
+        date_keys = [key for key in d.keys() if RedisTokensRepository._is_date_key(key)]
+        for date in date_keys:
+            if d[date] != "None":
+                d[date] = datetime.fromisoformat(d[date])
+        for key in d.keys():
+            if d[key] == "None":
+                d[key] = None
+
+    def _model_dict_from_hash(self, redis_key: str) -> Optional[dict]:
+        redis = self.connection
+        if redis.exists(redis_key):
+            token_dict = redis.hgetall(redis_key)
+            RedisTokensRepository._prepare_model_dict(token_dict)
+            return token_dict
+        return None
+
+    def _hash_as_model(self, redis_key: str, model_class):
+        token_dict = self._model_dict_from_hash(redis_key)
+        if token_dict is not None:
+            return model_class(**token_dict)
+        return None
+
+    def _token_from_hash(self, redis_key: str) -> Optional[Token]:
+        return self._hash_as_model(redis_key, Token)
+
+    def _recovery_key_from_hash(self, redis_key: str) -> Optional[RecoveryKey]:
+        return self._hash_as_model(redis_key, RecoveryKey)
+
+    def _new_device_key_from_hash(self, redis_key: str) -> Optional[NewDeviceKey]:
+        return self._hash_as_model(redis_key, NewDeviceKey)
+
+    def _store_model_as_hash(self, redis_key, model):
+        redis = self.connection
+        for key, value in model.dict().items():
+            if isinstance(value, datetime):
+                value = value.isoformat()
+            redis.hset(redis_key, key, str(value))
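An illustrative end-to-end sketch of the recovery-key flow through this repository. This is not part of the commit; the module path in the import is assumed, and it presumes a reachable Redis behind `RedisPool`:

```python
# Hypothetical usage of the Redis-backed tokens repository above.
from mnemonic import Mnemonic

from selfprivacy_api.repositories.tokens.redis_tokens_repository import (
    RedisTokensRepository,  # assumed module path
)

repo = RedisTokensRepository()
recovery_key = repo.create_recovery_key(expiration=None, uses_left=2)

# Derive the phrase from the hex key, as actions/api_tokens.py does.
phrase = Mnemonic(language="english").to_mnemonic(bytes.fromhex(recovery_key.key))

token = repo.use_mnemonic_recovery_key(phrase, "tablet")  # decrements uses_left
print(token.token)
assert repo.get_recovery_key().uses_left == 1
```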

@@ -8,20 +8,18 @@ from selfprivacy_api.actions.api_tokens import (
     InvalidUsesLeft,
     NotFoundException,
     delete_api_token,
+    refresh_api_token,
     get_api_recovery_token_status,
     get_api_tokens_with_caller_flag,
     get_new_api_recovery_key,
-    refresh_api_token,
+    use_mnemonic_recovery_token,
+    delete_new_device_auth_token,
+    get_new_device_auth_token,
+    use_new_device_auth_token,
 )
 from selfprivacy_api.dependencies import TokenHeader, get_token_header
-from selfprivacy_api.utils.auth import (
-    delete_new_device_auth_token,
-    get_new_device_auth_token,
-    use_mnemonic_recoverery_token,
-    use_new_device_auth_token,
-)

 router = APIRouter(
     prefix="/auth",
@@ -99,7 +97,7 @@ class UseTokenInput(BaseModel):

 @router.post("/recovery_token/use")
 async def rest_use_recovery_token(input: UseTokenInput):
-    token = use_mnemonic_recoverery_token(input.token, input.device)
+    token = use_mnemonic_recovery_token(input.token, input.device)
     if token is None:
         raise HTTPException(status_code=404, detail="Token not found")
     return {"token": token}

View file

@ -117,7 +117,7 @@ async def get_mailserver_dkim():
"""Get the DKIM record for the mailserver""" """Get the DKIM record for the mailserver"""
domain = get_domain() domain = get_domain()
dkim = get_dkim_key(domain) dkim = get_dkim_key(domain, parse=False)
if dkim is None: if dkim is None:
raise HTTPException(status_code=404, detail="DKIM record not found") raise HTTPException(status_code=404, detail="DKIM record not found")
dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8") dkim = base64.b64encode(dkim.encode("utf-8")).decode("utf-8")
@ -257,24 +257,25 @@ async def restore_restic_backup(backup: BackupRestoreInput):
raise HTTPException(status_code=404, detail="Backup not found") raise HTTPException(status_code=404, detail="Backup not found")
class BackblazeConfigInput(BaseModel): class BackupConfigInput(BaseModel):
accountId: str accountId: str
accountKey: str accountKey: str
bucket: str bucket: str
@router.put("/restic/backblaze/config") @router.put("/restic/backblaze/config")
async def set_backblaze_config(backblaze_config: BackblazeConfigInput): async def set_backblaze_config(backup_config: BackupConfigInput):
with WriteUserData() as data: with WriteUserData() as data:
if "backblaze" not in data: if "backup" not in data:
data["backblaze"] = {} data["backup"] = {}
data["backblaze"]["accountId"] = backblaze_config.accountId data["backup"]["provider"] = "BACKBLAZE"
data["backblaze"]["accountKey"] = backblaze_config.accountKey data["backup"]["accountId"] = backup_config.accountId
data["backblaze"]["bucket"] = backblaze_config.bucket data["backup"]["accountKey"] = backup_config.accountKey
data["backup"]["bucket"] = backup_config.bucket
restic_tasks.update_keys_from_userdata() restic_tasks.update_keys_from_userdata()
return "New Backblaze settings saved" return "New backup settings saved"
@router.post("/ssh/enable") @router.post("/ssh/enable")

View file

@ -7,6 +7,7 @@ from threading import Lock
from enum import Enum from enum import Enum
import portalocker import portalocker
from selfprivacy_api.utils import ReadUserData from selfprivacy_api.utils import ReadUserData
from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass
class ResticStates(Enum): class ResticStates(Enum):
@ -21,7 +22,7 @@ class ResticStates(Enum):
INITIALIZING = 6 INITIALIZING = 6
class ResticController: class ResticController(metaclass=SingletonMetaclass):
""" """
States in which the restic_controller may be States in which the restic_controller may be
- no backblaze key - no backblaze key
@ -35,16 +36,8 @@ class ResticController:
Current state can be fetched with get_state() Current state can be fetched with get_state()
""" """
_instance = None
_lock = Lock()
_initialized = False _initialized = False
def __new__(cls):
if not cls._instance:
with cls._lock:
cls._instance = super(ResticController, cls).__new__(cls)
return cls._instance
def __init__(self): def __init__(self):
if self._initialized: if self._initialized:
return return

View file

@ -144,7 +144,7 @@ class Bitwarden(Service):
] ]
def move_to_volume(self, volume: BlockDevice) -> Job: def move_to_volume(self, volume: BlockDevice) -> Job:
job = Jobs.get_instance().add( job = Jobs.add(
type_id="services.bitwarden.move", type_id="services.bitwarden.move",
name="Move Bitwarden", name="Move Bitwarden",
description=f"Moving Bitwarden data to {volume.name}", description=f"Moving Bitwarden data to {volume.name}",

View file

@ -29,7 +29,7 @@ def move_service(
userdata_location: str, userdata_location: str,
): ):
"""Move a service to another volume.""" """Move a service to another volume."""
job = Jobs.get_instance().update( job = Jobs.update(
job=job, job=job,
status_text="Performing pre-move checks...", status_text="Performing pre-move checks...",
status=JobStatus.RUNNING, status=JobStatus.RUNNING,
@ -37,7 +37,7 @@ def move_service(
service_name = service.get_display_name() service_name = service.get_display_name()
with ReadUserData() as user_data: with ReadUserData() as user_data:
if not user_data.get("useBinds", False): if not user_data.get("useBinds", False):
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.ERROR, status=JobStatus.ERROR,
error="Server is not using binds.", error="Server is not using binds.",
@ -46,7 +46,7 @@ def move_service(
# Check if we are on the same volume # Check if we are on the same volume
old_volume = service.get_location() old_volume = service.get_location()
if old_volume == volume.name: if old_volume == volume.name:
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.ERROR, status=JobStatus.ERROR,
error=f"{service_name} is already on this volume.", error=f"{service_name} is already on this volume.",
@ -54,7 +54,7 @@ def move_service(
return return
# Check if there is enough space on the new volume # Check if there is enough space on the new volume
if int(volume.fsavail) < service.get_storage_usage(): if int(volume.fsavail) < service.get_storage_usage():
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.ERROR, status=JobStatus.ERROR,
error="Not enough space on the new volume.", error="Not enough space on the new volume.",
@ -62,7 +62,7 @@ def move_service(
return return
# Make sure the volume is mounted # Make sure the volume is mounted
if volume.name != "sda1" and f"/volumes/{volume.name}" not in volume.mountpoints: if volume.name != "sda1" and f"/volumes/{volume.name}" not in volume.mountpoints:
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.ERROR, status=JobStatus.ERROR,
error="Volume is not mounted.", error="Volume is not mounted.",
@ -71,14 +71,14 @@ def move_service(
# Make sure current actual directory exists and if its user and group are correct # Make sure current actual directory exists and if its user and group are correct
for folder in folder_names: for folder in folder_names:
if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").exists(): if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").exists():
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.ERROR, status=JobStatus.ERROR,
error=f"{service_name} is not found.", error=f"{service_name} is not found.",
) )
return return
if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").is_dir(): if not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").is_dir():
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.ERROR, status=JobStatus.ERROR,
error=f"{service_name} is not a directory.", error=f"{service_name} is not a directory.",
@ -88,7 +88,7 @@ def move_service(
not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").owner() not pathlib.Path(f"/volumes/{old_volume}/{folder.name}").owner()
== folder.owner == folder.owner
): ):
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.ERROR, status=JobStatus.ERROR,
error=f"{service_name} owner is not {folder.owner}.", error=f"{service_name} owner is not {folder.owner}.",
@ -96,7 +96,7 @@ def move_service(
return return
# Stop service # Stop service
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.RUNNING, status=JobStatus.RUNNING,
status_text=f"Stopping {service_name}...", status_text=f"Stopping {service_name}...",
@ -113,7 +113,7 @@ def move_service(
break break
time.sleep(1) time.sleep(1)
else: else:
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.ERROR, status=JobStatus.ERROR,
error=f"{service_name} did not stop in 30 seconds.", error=f"{service_name} did not stop in 30 seconds.",
@ -121,7 +121,7 @@ def move_service(
return return
# Unmount old volume # Unmount old volume
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status_text="Unmounting old folder...", status_text="Unmounting old folder...",
status=JobStatus.RUNNING, status=JobStatus.RUNNING,
@ -134,14 +134,14 @@ def move_service(
check=True, check=True,
) )
except subprocess.CalledProcessError: except subprocess.CalledProcessError:
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.ERROR, status=JobStatus.ERROR,
error="Unable to unmount old volume.", error="Unable to unmount old volume.",
) )
return return
# Move data to new volume and set correct permissions # Move data to new volume and set correct permissions
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status_text="Moving data to new volume...", status_text="Moving data to new volume...",
status=JobStatus.RUNNING, status=JobStatus.RUNNING,
@ -154,14 +154,14 @@ def move_service(
f"/volumes/{old_volume}/{folder.name}", f"/volumes/{old_volume}/{folder.name}",
f"/volumes/{volume.name}/{folder.name}", f"/volumes/{volume.name}/{folder.name}",
) )
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status_text="Moving data to new volume...", status_text="Moving data to new volume...",
status=JobStatus.RUNNING, status=JobStatus.RUNNING,
progress=current_progress + folder_percentage, progress=current_progress + folder_percentage,
) )
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status_text=f"Making sure {service_name} owns its files...", status_text=f"Making sure {service_name} owns its files...",
status=JobStatus.RUNNING, status=JobStatus.RUNNING,
@ -180,14 +180,14 @@ def move_service(
) )
except subprocess.CalledProcessError as error: except subprocess.CalledProcessError as error:
print(error.output) print(error.output)
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.RUNNING, status=JobStatus.RUNNING,
error=f"Unable to set ownership of new volume. {service_name} may not be able to access its files. Continuing anyway.", error=f"Unable to set ownership of new volume. {service_name} may not be able to access its files. Continuing anyway.",
) )
# Mount new volume # Mount new volume
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status_text=f"Mounting {service_name} data...", status_text=f"Mounting {service_name} data...",
status=JobStatus.RUNNING, status=JobStatus.RUNNING,
@ -207,7 +207,7 @@ def move_service(
) )
except subprocess.CalledProcessError as error: except subprocess.CalledProcessError as error:
print(error.output) print(error.output)
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.ERROR, status=JobStatus.ERROR,
error="Unable to mount new volume.", error="Unable to mount new volume.",
@ -215,7 +215,7 @@ def move_service(
return return
# Update userdata # Update userdata
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status_text="Finishing move...", status_text="Finishing move...",
status=JobStatus.RUNNING, status=JobStatus.RUNNING,
@ -227,7 +227,7 @@ def move_service(
user_data[userdata_location]["location"] = volume.name user_data[userdata_location]["location"] = volume.name
# Start service # Start service
service.start() service.start()
Jobs.get_instance().update( Jobs.update(
job=job, job=job,
status=JobStatus.FINISHED, status=JobStatus.FINISHED,
result=f"{service_name} moved successfully.", result=f"{service_name} moved successfully.",

View file

@ -141,7 +141,7 @@ class Gitea(Service):
] ]
def move_to_volume(self, volume: BlockDevice) -> Job: def move_to_volume(self, volume: BlockDevice) -> Job:
job = Jobs.get_instance().add( job = Jobs.add(
type_id="services.gitea.move", type_id="services.gitea.move",
name="Move Gitea", name="Move Gitea",
description=f"Moving Gitea data to {volume.name}", description=f"Moving Gitea data to {volume.name}",

View file

@ -149,7 +149,7 @@ class MailServer(Service):
] ]
def move_to_volume(self, volume: BlockDevice) -> Job: def move_to_volume(self, volume: BlockDevice) -> Job:
job = Jobs.get_instance().add( job = Jobs.add(
type_id="services.mailserver.move", type_id="services.mailserver.move",
name="Move Mail Server", name="Move Mail Server",
description=f"Moving mailserver data to {volume.name}", description=f"Moving mailserver data to {volume.name}",

View file

@ -149,7 +149,7 @@ class Nextcloud(Service):
] ]
def move_to_volume(self, volume: BlockDevice) -> Job: def move_to_volume(self, volume: BlockDevice) -> Job:
job = Jobs.get_instance().add( job = Jobs.add(
type_id="services.nextcloud.move", type_id="services.nextcloud.move",
name="Move Nextcloud", name="Move Nextcloud",
description=f"Moving Nextcloud to volume {volume.name}", description=f"Moving Nextcloud to volume {volume.name}",

View file

@ -129,7 +129,7 @@ class Pleroma(Service):
] ]
def move_to_volume(self, volume: BlockDevice) -> Job: def move_to_volume(self, volume: BlockDevice) -> Job:
job = Jobs.get_instance().add( job = Jobs.add(
type_id="services.pleroma.move", type_id="services.pleroma.move",
name="Move Pleroma", name="Move Pleroma",
description=f"Moving Pleroma to volume {volume.name}", description=f"Moving Pleroma to volume {volume.name}",

View file

@ -164,13 +164,25 @@ def parse_date(date_str: str) -> datetime.datetime:
raise ValueError("Invalid date string") raise ValueError("Invalid date string")
def get_dkim_key(domain): def get_dkim_key(domain, parse=True):
"""Get DKIM key from /var/dkim/<domain>.selector.txt""" """Get DKIM key from /var/dkim/<domain>.selector.txt"""
if os.path.exists("/var/dkim/" + domain + ".selector.txt"): if os.path.exists("/var/dkim/" + domain + ".selector.txt"):
cat_process = subprocess.Popen( cat_process = subprocess.Popen(
["cat", "/var/dkim/" + domain + ".selector.txt"], stdout=subprocess.PIPE ["cat", "/var/dkim/" + domain + ".selector.txt"], stdout=subprocess.PIPE
) )
dkim = cat_process.communicate()[0] dkim = cat_process.communicate()[0]
if parse:
# Extract key from file
dkim = dkim.split(b"(")[1]
dkim = dkim.split(b")")[0]
# Replace all quotes with nothing
dkim = dkim.replace(b'"', b"")
# Trim whitespace, remove newlines and tabs
dkim = dkim.strip()
dkim = dkim.replace(b"\n", b"")
dkim = dkim.replace(b"\t", b"")
# Remove all redundant spaces
dkim = b" ".join(dkim.split())
return str(dkim, "utf-8") return str(dkim, "utf-8")
return None return None
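As a hedged illustration of what parse=True strips away, here is the same transformation applied to a hand-written selector record (the bytes below are made up, not taken from a real server):

raw = b'selector._domainkey IN TXT ( "v=DKIM1; k=rsa; "\n\t"p=MIGfMA0G..." )'
dkim = raw.split(b"(")[1]
dkim = dkim.split(b")")[0]
dkim = dkim.replace(b'"', b"")  # drop the quoting
dkim = dkim.strip()  # trim surrounding whitespace
dkim = dkim.replace(b"\n", b"").replace(b"\t", b"")
dkim = b" ".join(dkim.split())  # collapse redundant spaces
assert str(dkim, "utf-8") == "v=DKIM1; k=rsa; p=MIGfMA0G..."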

View file

@ -1,329 +0,0 @@
#!/usr/bin/env python3
"""Token management utils"""
import secrets
from datetime import datetime, timedelta
import re
import typing
from pydantic import BaseModel
from mnemonic import Mnemonic
from . import ReadUserData, UserDataFiles, WriteUserData, parse_date
"""
Tokens are stored in the tokens.json file.
The file contains device tokens, the recovery token and the new device auth token.
File structure:
{
"tokens": [
{
"token": "device token",
"name": "device name",
"date": "date of creation",
}
],
"recovery_token": {
"token": "recovery token",
"date": "date of creation",
"expiration": "date of expiration",
"uses_left": "number of uses left"
},
"new_device": {
"token": "new device auth token",
"date": "date of creation",
"expiration": "date of expiration",
}
}
The recovery token may or may not have an expiration date and uses_left.
There may be no recovery token at all.
Device tokens must be unique.
"""
def _get_tokens():
"""Get all tokens as list of tokens of every device"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
return [token["token"] for token in tokens["tokens"]]
def _get_token_names():
"""Get all token names"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
return [t["name"] for t in tokens["tokens"]]
def _validate_token_name(name):
"""Token name must be an alphanumeric string and not empty.
Replace invalid characters with '_'
If token name exists, add a random number to the end of the name until it is unique.
"""
if not re.match("^[a-zA-Z0-9]*$", name):
name = re.sub("[^a-zA-Z0-9]", "_", name)
if name == "":
name = "Unknown device"
while name in _get_token_names():
name += str(secrets.randbelow(10))
return name
def is_token_valid(token):
"""Check if token is valid"""
if token in _get_tokens():
return True
return False
def is_token_name_exists(token_name):
"""Check if token name exists"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
return token_name in [t["name"] for t in tokens["tokens"]]
def is_token_name_pair_valid(token_name, token):
"""Check if token name and token pair exists"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
for t in tokens["tokens"]:
if t["name"] == token_name and t["token"] == token:
return True
return False
def get_token_name(token: str) -> typing.Optional[str]:
"""Return the name of the token provided"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
for t in tokens["tokens"]:
if t["token"] == token:
return t["name"]
return None
class BasicTokenInfo(BaseModel):
"""Token info"""
name: str
date: datetime
def get_tokens_info():
"""Get all tokens info without tokens themselves"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
return [
BasicTokenInfo(
name=t["name"],
date=parse_date(t["date"]),
)
for t in tokens["tokens"]
]
def _generate_token():
"""Generates new token and makes sure it is unique"""
token = secrets.token_urlsafe(32)
while token in _get_tokens():
token = secrets.token_urlsafe(32)
return token
def create_token(name):
"""Create new token"""
token = _generate_token()
name = _validate_token_name(name)
with WriteUserData(UserDataFiles.TOKENS) as tokens:
tokens["tokens"].append(
{
"token": token,
"name": name,
"date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")),
}
)
return token
def delete_token(token_name):
"""Delete token"""
with WriteUserData(UserDataFiles.TOKENS) as tokens:
tokens["tokens"] = [t for t in tokens["tokens"] if t["name"] != token_name]
def refresh_token(token: str) -> typing.Optional[str]:
"""Change the token field of the existing token"""
new_token = _generate_token()
with WriteUserData(UserDataFiles.TOKENS) as tokens:
for t in tokens["tokens"]:
if t["token"] == token:
t["token"] = new_token
return new_token
return None
def is_recovery_token_exists():
"""Check if recovery token exists"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
return "recovery_token" in tokens
def is_recovery_token_valid():
"""Check if recovery token is valid"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
if "recovery_token" not in tokens:
return False
recovery_token = tokens["recovery_token"]
if "uses_left" in recovery_token and recovery_token["uses_left"] is not None:
if recovery_token["uses_left"] <= 0:
return False
if "expiration" not in recovery_token or recovery_token["expiration"] is None:
return True
return datetime.now() < parse_date(recovery_token["expiration"])
def get_recovery_token_status():
"""Get recovery token date of creation, expiration and uses left"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
if "recovery_token" not in tokens:
return None
recovery_token = tokens["recovery_token"]
return {
"date": recovery_token["date"],
"expiration": recovery_token["expiration"]
if "expiration" in recovery_token
else None,
"uses_left": recovery_token["uses_left"]
if "uses_left" in recovery_token
else None,
}
def _get_recovery_token():
"""Get recovery token"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
if "recovery_token" not in tokens:
return None
return tokens["recovery_token"]["token"]
def generate_recovery_token(
expiration: typing.Optional[datetime], uses_left: typing.Optional[int]
) -> str:
"""Generate a 24 bytes recovery token and return a mneomnic word list.
Write a string representation of the recovery token to the tokens.json file.
"""
# expires must be a date or None
# uses_left must be an integer or None
if expiration is not None:
if not isinstance(expiration, datetime):
raise TypeError("expires must be a datetime object")
if uses_left is not None:
if not isinstance(uses_left, int):
raise TypeError("uses_left must be an integer")
if uses_left <= 0:
raise ValueError("uses_left must be greater than 0")
recovery_token = secrets.token_bytes(24)
recovery_token_str = recovery_token.hex()
with WriteUserData(UserDataFiles.TOKENS) as tokens:
tokens["recovery_token"] = {
"token": recovery_token_str,
"date": str(datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")),
"expiration": expiration.strftime("%Y-%m-%dT%H:%M:%S.%f")
if expiration is not None
else None,
"uses_left": uses_left if uses_left is not None else None,
}
return Mnemonic(language="english").to_mnemonic(recovery_token)
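For reference, a standalone round-trip of the mnemonic encoding used here, relying only on the secrets and mnemonic calls this module already imports:

import secrets
from mnemonic import Mnemonic

raw = secrets.token_bytes(24)
phrase = Mnemonic(language="english").to_mnemonic(raw)  # 24 bytes -> 18 words
assert Mnemonic(language="english").check(phrase)
assert Mnemonic(language="english").to_entropy(phrase) == raw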
def use_mnemonic_recoverery_token(mnemonic_phrase, name):
"""Use the recovery token by converting the mnemonic word list to a byte array.
If the recovery token itself is invalid, return None.
If the binary representation of the phrase does not match
the byte array of the recovery token, return None.
If the mnemonic phrase is valid, generate a device token and return it.
Subtract 1 from uses_left if it exists.
mnemonic_phrase is a string representation of the mnemonic word list.
"""
if not is_recovery_token_valid():
return None
recovery_token_str = _get_recovery_token()
if recovery_token_str is None:
return None
recovery_token = bytes.fromhex(recovery_token_str)
if not Mnemonic(language="english").check(mnemonic_phrase):
return None
phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase)
if phrase_bytes != recovery_token:
return None
token = _generate_token()
name = _validate_token_name(name)
with WriteUserData(UserDataFiles.TOKENS) as tokens:
tokens["tokens"].append(
{
"token": token,
"name": name,
"date": str(datetime.now()),
}
)
if "recovery_token" in tokens:
if (
"uses_left" in tokens["recovery_token"]
and tokens["recovery_token"]["uses_left"] is not None
):
tokens["recovery_token"]["uses_left"] -= 1
return token
def get_new_device_auth_token() -> str:
"""Generate a new device auth token which is valid for 10 minutes
and return a mnemonic phrase representation.
Write the token to the new_device section of the tokens.json file.
"""
token = secrets.token_bytes(16)
token_str = token.hex()
with WriteUserData(UserDataFiles.TOKENS) as tokens:
tokens["new_device"] = {
"token": token_str,
"date": str(datetime.now()),
"expiration": str(datetime.now() + timedelta(minutes=10)),
}
return Mnemonic(language="english").to_mnemonic(token)
def _get_new_device_auth_token():
"""Get new device auth token. If it is expired, return None"""
with ReadUserData(UserDataFiles.TOKENS) as tokens:
if "new_device" not in tokens:
return None
new_device = tokens["new_device"]
if "expiration" not in new_device:
return None
expiration = parse_date(new_device["expiration"])
if datetime.now() > expiration:
return None
return new_device["token"]
def delete_new_device_auth_token():
"""Delete new device auth token"""
with WriteUserData(UserDataFiles.TOKENS) as tokens:
if "new_device" in tokens:
del tokens["new_device"]
def use_new_device_auth_token(mnemonic_phrase, name):
"""Use the new device auth token by converting the mnemonic string to a byte array.
If the mnemonic phrase is valid then generate a device token and return it.
The new device auth token is deleted after use.
"""
token_str = _get_new_device_auth_token()
if token_str is None:
return None
token = bytes.fromhex(token_str)
if not Mnemonic(language="english").check(mnemonic_phrase):
return None
phrase_bytes = Mnemonic(language="english").to_entropy(mnemonic_phrase)
if phrase_bytes != token:
return None
token = create_token(name)
with WriteUserData(UserDataFiles.TOKENS) as tokens:
if "new_device" in tokens:
del tokens["new_device"]
return token

View file

@ -4,6 +4,7 @@ import json
import typing import typing
from selfprivacy_api.utils import WriteUserData from selfprivacy_api.utils import WriteUserData
from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass
def get_block_device(device_name): def get_block_device(device_name):
@ -147,16 +148,9 @@ class BlockDevice:
return False return False
class BlockDevices: class BlockDevices(metaclass=SingletonMetaclass):
"""Singleton holding all Block devices""" """Singleton holding all Block devices"""
_instance = None
def __new__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super().__new__(cls)
return cls._instance
def __init__(self): def __init__(self):
self.block_devices = [] self.block_devices = []
self.update() self.update()

View file

@ -0,0 +1,41 @@
"""
Redis pool module for selfprivacy_api
"""
import redis
from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass
from os import environ
REDIS_SOCKET = "/run/redis-sp-api/redis.sock"
class RedisPool(metaclass=SingletonMetaclass):
"""
Redis connection pool singleton.
"""
def __init__(self):
if "USE_REDIS_PORT" in environ.keys():
self._pool = redis.ConnectionPool(
host="127.0.0.1",
port=int(environ["USE_REDIS_PORT"]),
decode_responses=True,
)
else:
self._pool = redis.ConnectionPool.from_url(
f"unix://{REDIS_SOCKET}",
decode_responses=True,
)
self._pubsub_connection = self.get_connection()
def get_connection(self):
"""
Get a connection from the pool.
"""
return redis.Redis(connection_pool=self._pool)
def get_pubsub(self):
"""
Get a pubsub connection from the pool.
"""
return self._pubsub_connection.pubsub()
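A short usage sketch; the import path selfprivacy_api.utils.redis_pool is an assumption inferred from the surrounding utils modules, not stated in this diff:

from selfprivacy_api.utils.redis_pool import RedisPool  # assumed module path

r1 = RedisPool().get_connection()
r2 = RedisPool().get_connection()
assert RedisPool() is RedisPool()  # singleton: one shared pool
r1.set("example_key", "example_value")  # illustrative key/value
print(r2.get("example_key"))  # -> "example_value" (decode_responses=True)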

View file

@ -0,0 +1,23 @@
"""
Singleton is a creational design pattern, which ensures that only
one object of its kind exists and provides a single point of access
to it for any other code.
"""
from threading import Lock
class SingletonMetaclass(type):
"""
This is a thread-safe implementation of Singleton.
"""
_instances = {}
_lock: Lock = Lock()
def __call__(cls, *args, **kwargs):
with cls._lock:
if cls not in cls._instances:
cls._instances[cls] = super(SingletonMetaclass, cls).__call__(
*args, **kwargs
)
return cls._instances[cls]
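A minimal demonstration of the metaclass; AppState is a hypothetical class, not from this codebase:

class AppState(metaclass=SingletonMetaclass):
    def __init__(self):
        self.counter = 0  # runs only on the first instantiation

a = AppState()
a.counter += 1
b = AppState()
assert a is b
assert b.counter == 1  # __init__ was not re-run for b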

View file

@ -2,7 +2,7 @@ from setuptools import setup, find_packages
setup( setup(
name="selfprivacy_api", name="selfprivacy_api",
version="2.0.0", version="2.1.2",
packages=find_packages(), packages=find_packages(),
scripts=[ scripts=[
"selfprivacy_api/app.py", "selfprivacy_api/app.py",

View file

@ -19,7 +19,45 @@ let
fastapi fastapi
uvicorn uvicorn
redis redis
strawberry-graphql (buildPythonPackage rec {
pname = "strawberry-graphql";
version = "0.123.0";
format = "pyproject";
patches = [
./strawberry-graphql.patch
];
propagatedBuildInputs = [
typing-extensions
python-multipart
python-dateutil
# flask
pydantic
pygments
poetry
# flask-cors
(buildPythonPackage rec {
pname = "graphql-core";
version = "3.2.0";
format = "setuptools";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-huKgvgCL/eGe94OI3opyWh2UKpGQykMcJKYIN5c4A84=";
};
checkInputs = [
pytest-asyncio
pytest-benchmark
pytestCheckHook
];
pythonImportsCheck = [
"graphql"
];
})
];
src = fetchPypi {
inherit pname version;
sha256 = "KsmZ5Xv8tUg6yBxieAEtvoKoRG60VS+iVGV0X6oCExo=";
};
})
]); ]);
in in
pkgs.mkShell { pkgs.mkShell {
@ -27,7 +65,6 @@ pkgs.mkShell {
sp-python sp-python
pkgs.black pkgs.black
pkgs.redis pkgs.redis
pkgs.restic
]; ];
shellHook = '' shellHook = ''
PYTHONPATH=${sp-python}/${sp-python.sitePackages} PYTHONPATH=${sp-python}/${sp-python.sitePackages}

View file

@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": true "enable": true
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -49,6 +41,19 @@
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
], ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"volumes": [ "volumes": [
] ]
} }

View file

@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": true "enable": true
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -55,5 +47,18 @@
"mountPoint": "/volumes/sda1", "mountPoint": "/volumes/sda1",
"filesystem": "ext4" "filesystem": "ext4"
} }
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }

View file

@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": true "enable": true
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }

View file

@ -2,8 +2,14 @@
# pylint: disable=unused-argument # pylint: disable=unused-argument
# pylint: disable=missing-function-docstring # pylint: disable=missing-function-docstring
import datetime import datetime
import pytest
from mnemonic import Mnemonic from mnemonic import Mnemonic
from selfprivacy_api.repositories.tokens.json_tokens_repository import (
JsonTokensRepository,
)
from selfprivacy_api.models.tokens.token import Token
from tests.common import generate_api_query, read_json, write_json from tests.common import generate_api_query, read_json, write_json
TOKENS_FILE_CONTETS = { TOKENS_FILE_CONTETS = {
@ -30,6 +36,11 @@ devices {
""" """
@pytest.fixture
def token_repo():
return JsonTokensRepository()
def test_graphql_tokens_info(authorized_client, tokens_file): def test_graphql_tokens_info(authorized_client, tokens_file):
response = authorized_client.post( response = authorized_client.post(
"/graphql", "/graphql",
@ -170,7 +181,7 @@ def test_graphql_refresh_token_unauthorized(client, tokens_file):
assert response.json()["data"] is None assert response.json()["data"] is None
def test_graphql_refresh_token(authorized_client, tokens_file): def test_graphql_refresh_token(authorized_client, tokens_file, token_repo):
response = authorized_client.post( response = authorized_client.post(
"/graphql", "/graphql",
json={"query": REFRESH_TOKEN_MUTATION}, json={"query": REFRESH_TOKEN_MUTATION},
@ -180,11 +191,12 @@ def test_graphql_refresh_token(authorized_client, tokens_file):
assert response.json()["data"]["refreshDeviceApiToken"]["success"] is True assert response.json()["data"]["refreshDeviceApiToken"]["success"] is True
assert response.json()["data"]["refreshDeviceApiToken"]["message"] is not None assert response.json()["data"]["refreshDeviceApiToken"]["message"] is not None
assert response.json()["data"]["refreshDeviceApiToken"]["code"] == 200 assert response.json()["data"]["refreshDeviceApiToken"]["code"] == 200
assert read_json(tokens_file)["tokens"][0] == { token = token_repo.get_token_by_name("test_token")
"token": response.json()["data"]["refreshDeviceApiToken"]["token"], assert token == Token(
"name": "test_token", token=response.json()["data"]["refreshDeviceApiToken"]["token"],
"date": "2022-01-14 08:31:10.789314", device_name="test_token",
} created_at=datetime.datetime(2022, 1, 14, 8, 31, 10, 789314),
)
NEW_DEVICE_KEY_MUTATION = """ NEW_DEVICE_KEY_MUTATION = """

View file

@ -0,0 +1,218 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=missing-function-docstring
"""
Tests that pin down the JSON token repository implementation
"""
import pytest
from datetime import datetime
from selfprivacy_api.models.tokens.token import Token
from selfprivacy_api.repositories.tokens.exceptions import (
TokenNotFound,
RecoveryKeyNotFound,
NewDeviceKeyNotFound,
)
from selfprivacy_api.repositories.tokens.json_tokens_repository import (
JsonTokensRepository,
)
from tests.common import read_json
from test_tokens_repository import (
mock_recovery_key_generate,
mock_generate_token,
mock_new_device_key_generate,
empty_keys,
)
ORIGINAL_TOKEN_CONTENT = [
{
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
"name": "primary_token",
"date": "2022-07-15 17:41:31.675698",
},
{
"token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
"name": "second_token",
"date": "2022-07-15 17:41:31.675698Z",
},
{
"token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
"name": "third_token",
"date": "2022-07-15T17:41:31.675698Z",
},
{
"token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
"name": "forth_token",
"date": "2022-07-15T17:41:31.675698",
},
]
@pytest.fixture
def tokens(mocker, datadir):
mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json")
assert read_json(datadir / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT
return datadir
@pytest.fixture
def null_keys(mocker, datadir):
mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json")
assert read_json(datadir / "null_keys.json")["recovery_token"] is None
assert read_json(datadir / "null_keys.json")["new_device"] is None
return datadir
def test_delete_token(tokens):
repo = JsonTokensRepository()
input_token = Token(
token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
device_name="primary_token",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
)
repo.delete_token(input_token)
assert read_json(tokens / "tokens.json")["tokens"] == [
{
"token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
"name": "second_token",
"date": "2022-07-15 17:41:31.675698Z",
},
{
"token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
"name": "third_token",
"date": "2022-07-15T17:41:31.675698Z",
},
{
"token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
"name": "forth_token",
"date": "2022-07-15T17:41:31.675698",
},
]
def test_delete_not_found_token(tokens):
repo = JsonTokensRepository()
input_token = Token(
token="imbadtoken",
device_name="primary_token",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
)
with pytest.raises(TokenNotFound):
assert repo.delete_token(input_token) is None
assert read_json(tokens / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT
def test_create_recovery_key(tokens, mock_recovery_key_generate):
repo = JsonTokensRepository()
assert repo.create_recovery_key(uses_left=1, expiration=None) is not None
assert read_json(tokens / "tokens.json")["recovery_token"] == {
"token": "889bf49c1d3199d71a2e704718772bd53a422020334db051",
"date": "2022-07-15T17:41:31.675698",
"expiration": None,
"uses_left": 1,
}
def test_use_mnemonic_recovery_key_when_null(null_keys):
repo = JsonTokensRepository()
with pytest.raises(RecoveryKeyNotFound):
assert (
repo.use_mnemonic_recovery_key(
mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb",
device_name="primary_token",
)
is None
)
def test_use_mnemonic_recovery_key(tokens, mock_generate_token):
repo = JsonTokensRepository()
assert repo.use_mnemonic_recovery_key(
mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park",
device_name="newdevice",
) == Token(
token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4",
device_name="newdevice",
created_at=datetime(2022, 11, 14, 6, 6, 32, 777123),
)
assert read_json(tokens / "tokens.json")["tokens"] == [
{
"date": "2022-07-15 17:41:31.675698",
"name": "primary_token",
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
},
{
"token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
"name": "second_token",
"date": "2022-07-15 17:41:31.675698Z",
},
{
"token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
"name": "third_token",
"date": "2022-07-15T17:41:31.675698Z",
},
{
"token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
"name": "forth_token",
"date": "2022-07-15T17:41:31.675698",
},
{
"date": "2022-11-14T06:06:32.777123",
"name": "newdevice",
"token": "ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4",
},
]
assert read_json(tokens / "tokens.json")["recovery_token"] == {
"date": "2022-11-11T11:48:54.228038",
"expiration": None,
"token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54",
"uses_left": 1,
}
def test_get_new_device_key(tokens, mock_new_device_key_generate):
repo = JsonTokensRepository()
assert repo.get_new_device_key() is not None
assert read_json(tokens / "tokens.json")["new_device"] == {
"date": "2022-07-15T17:41:31.675698",
"expiration": "2022-07-15T17:41:31.675698",
"token": "43478d05b35e4781598acd76e33832bb",
}
def test_delete_new_device_key(tokens):
repo = JsonTokensRepository()
assert repo.delete_new_device_key() is None
assert "new_device" not in read_json(tokens / "tokens.json")
def test_delete_new_device_key_when_empty(empty_keys):
repo = JsonTokensRepository()
repo.delete_new_device_key()
assert "new_device" not in read_json(empty_keys / "empty_keys.json")
def test_use_mnemonic_new_device_key_when_null(null_keys):
repo = JsonTokensRepository()
with pytest.raises(NewDeviceKeyNotFound):
assert (
repo.use_mnemonic_new_device_key(
device_name="imnew",
mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb",
)
is None
)

View file

@ -0,0 +1,9 @@
{
"tokens": [
{
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
"name": "primary_token",
"date": "2022-07-15 17:41:31.675698"
}
]
}

View file

@ -0,0 +1,26 @@
{
"tokens": [
{
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
"name": "primary_token",
"date": "2022-07-15 17:41:31.675698"
},
{
"token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
"name": "second_token",
"date": "2022-07-15 17:41:31.675698Z"
},
{
"token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
"name": "third_token",
"date": "2022-07-15T17:41:31.675698Z"
},
{
"token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
"name": "forth_token",
"date": "2022-07-15T17:41:31.675698"
}
],
"recovery_token": null,
"new_device": null
}

View file

@ -0,0 +1,35 @@
{
"tokens": [
{
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
"name": "primary_token",
"date": "2022-07-15 17:41:31.675698"
},
{
"token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
"name": "second_token",
"date": "2022-07-15 17:41:31.675698Z"
},
{
"token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
"name": "third_token",
"date": "2022-07-15T17:41:31.675698Z"
},
{
"token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
"name": "forth_token",
"date": "2022-07-15T17:41:31.675698"
}
],
"recovery_token": {
"token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54",
"date": "2022-11-11T11:48:54.228038",
"expiration": null,
"uses_left": 2
},
"new_device": {
"token": "2237238de23dc71ab558e317bdb8ff8e",
"date": "2022-10-26 20:50:47.973212",
"expiration": "2022-10-26 21:00:47.974153"
}
}

View file

@ -2,7 +2,8 @@
# pylint: disable=unused-argument # pylint: disable=unused-argument
# pylint: disable=missing-function-docstring # pylint: disable=missing-function-docstring
from datetime import datetime, timezone from datetime import datetime, timedelta
from mnemonic import Mnemonic
import pytest import pytest
@ -18,38 +19,22 @@ from selfprivacy_api.repositories.tokens.exceptions import (
from selfprivacy_api.repositories.tokens.json_tokens_repository import ( from selfprivacy_api.repositories.tokens.json_tokens_repository import (
JsonTokensRepository, JsonTokensRepository,
) )
from selfprivacy_api.repositories.tokens.redis_tokens_repository import (
RedisTokensRepository,
)
from tests.common import read_json from tests.common import read_json
ORIGINAL_TOKEN_CONTENT = [ ORIGINAL_DEVICE_NAMES = [
{ "primary_token",
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", "second_token",
"name": "primary_token", "third_token",
"date": "2022-07-15 17:41:31.675698", "forth_token",
},
{
"token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
"name": "second_token",
"date": "2022-07-15 17:41:31.675698Z",
},
{
"token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
"name": "third_token",
"date": "2022-07-15T17:41:31.675698Z",
},
{
"token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
"name": "forth_token",
"date": "2022-07-15T17:41:31.675698",
},
] ]
@pytest.fixture def mnemonic_from_hex(hexkey):
def tokens(mocker, datadir): return Mnemonic(language="english").to_mnemonic(bytes.fromhex(hexkey))
mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "tokens.json")
assert read_json(datadir / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT
return datadir
@pytest.fixture @pytest.fixture
@ -65,23 +50,10 @@ def empty_keys(mocker, datadir):
return datadir return datadir
@pytest.fixture
def null_keys(mocker, datadir):
mocker.patch("selfprivacy_api.utils.TOKENS_FILE", new=datadir / "null_keys.json")
assert read_json(datadir / "null_keys.json")["recovery_token"] is None
assert read_json(datadir / "null_keys.json")["new_device"] is None
return datadir
class RecoveryKeyMockReturnNotValid:
def is_valid() -> bool:
return False
@pytest.fixture @pytest.fixture
def mock_new_device_key_generate(mocker): def mock_new_device_key_generate(mocker):
mock = mocker.patch( mock = mocker.patch(
"selfprivacy_api.repositories.tokens.json_tokens_repository.NewDeviceKey.generate", "selfprivacy_api.models.tokens.new_device_key.NewDeviceKey.generate",
autospec=True, autospec=True,
return_value=NewDeviceKey( return_value=NewDeviceKey(
key="43478d05b35e4781598acd76e33832bb", key="43478d05b35e4781598acd76e33832bb",
@ -92,10 +64,25 @@ def mock_new_device_key_generate(mocker):
return mock return mock
# mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb",
@pytest.fixture
def mock_new_device_key_generate_for_mnemonic(mocker):
mock = mocker.patch(
"selfprivacy_api.models.tokens.new_device_key.NewDeviceKey.generate",
autospec=True,
return_value=NewDeviceKey(
key="2237238de23dc71ab558e317bdb8ff8e",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
),
)
return mock
@pytest.fixture @pytest.fixture
def mock_generate_token(mocker): def mock_generate_token(mocker):
mock = mocker.patch( mock = mocker.patch(
"selfprivacy_api.repositories.tokens.json_tokens_repository.Token.generate", "selfprivacy_api.models.tokens.token.Token.generate",
autospec=True, autospec=True,
return_value=Token( return_value=Token(
token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4", token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4",
@ -107,11 +94,16 @@ def mock_generate_token(mocker):
@pytest.fixture @pytest.fixture
def mock_get_recovery_key_return_not_valid(mocker): def mock_recovery_key_generate_invalid(mocker):
mock = mocker.patch( mock = mocker.patch(
"selfprivacy_api.repositories.tokens.json_tokens_repository.JsonTokensRepository.get_recovery_key", "selfprivacy_api.models.tokens.recovery_key.RecoveryKey.generate",
autospec=True, autospec=True,
return_value=RecoveryKeyMockReturnNotValid, return_value=RecoveryKey(
key="889bf49c1d3199d71a2e704718772bd53a422020334db051",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
expires_at=None,
uses_left=0,
),
) )
return mock return mock
@ -119,7 +111,7 @@ def mock_get_recovery_key_return_not_valid(mocker):
@pytest.fixture @pytest.fixture
def mock_token_generate(mocker): def mock_token_generate(mocker):
mock = mocker.patch( mock = mocker.patch(
"selfprivacy_api.repositories.tokens.json_tokens_repository.Token.generate", "selfprivacy_api.models.tokens.token.Token.generate",
autospec=True, autospec=True,
return_value=Token( return_value=Token(
token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM",
@ -133,7 +125,7 @@ def mock_token_generate(mocker):
@pytest.fixture @pytest.fixture
def mock_recovery_key_generate(mocker): def mock_recovery_key_generate(mocker):
mock = mocker.patch( mock = mocker.patch(
"selfprivacy_api.repositories.tokens.json_tokens_repository.RecoveryKey.generate", "selfprivacy_api.models.tokens.recovery_key.RecoveryKey.generate",
autospec=True, autospec=True,
return_value=RecoveryKey( return_value=RecoveryKey(
key="889bf49c1d3199d71a2e704718772bd53a422020334db051", key="889bf49c1d3199d71a2e704718772bd53a422020334db051",
@ -145,127 +137,158 @@ def mock_recovery_key_generate(mocker):
return mock return mock
@pytest.fixture
def empty_json_repo(empty_keys):
repo = JsonTokensRepository()
for token in repo.get_tokens():
repo.delete_token(token)
assert repo.get_tokens() == []
return repo
@pytest.fixture
def empty_redis_repo():
repo = RedisTokensRepository()
repo.reset()
assert repo.get_tokens() == []
return repo
@pytest.fixture(params=["json", "redis"])
def empty_repo(request, empty_json_repo, empty_redis_repo):
if request.param == "json":
return empty_json_repo
if request.param == "redis":
return empty_redis_repo
# return empty_json_repo
else:
raise NotImplementedError
@pytest.fixture
def some_tokens_repo(empty_repo):
for name in ORIGINAL_DEVICE_NAMES:
empty_repo.create_token(name)
assert len(empty_repo.get_tokens()) == len(ORIGINAL_DEVICE_NAMES)
for name in ORIGINAL_DEVICE_NAMES:
assert empty_repo.get_token_by_name(name) is not None
assert empty_repo.get_new_device_key() is not None
return empty_repo
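Because empty_repo is parametrized over both backends, every test that takes some_tokens_repo runs once against JSON and once against Redis; a minimal standalone sketch of the same pytest pattern (names here are illustrative):

import pytest

@pytest.fixture(params=["json", "redis"])
def backend(request):
    return request.param

def test_runs_once_per_backend(backend):
    assert backend in ("json", "redis")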
############### ###############
# Test tokens # # Test tokens #
############### ###############
def test_get_token_by_token_string(tokens): def test_get_token_by_token_string(some_tokens_repo):
repo = JsonTokensRepository() repo = some_tokens_repo
test_token = repo.get_tokens()[2]
assert repo.get_token_by_token_string( assert repo.get_token_by_token_string(token_string=test_token.token) == test_token
token_string="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI"
) == Token(
token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
device_name="primary_token",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
)
def test_get_token_by_non_existent_token_string(tokens): def test_get_token_by_non_existent_token_string(some_tokens_repo):
repo = JsonTokensRepository() repo = some_tokens_repo
with pytest.raises(TokenNotFound): with pytest.raises(TokenNotFound):
assert repo.get_token_by_token_string(token_string="iamBadtoken") is None assert repo.get_token_by_token_string(token_string="iamBadtoken") is None
def test_get_token_by_name(tokens): def test_get_token_by_name(some_tokens_repo):
repo = JsonTokensRepository() repo = some_tokens_repo
assert repo.get_token_by_name(token_name="primary_token") is not None token = repo.get_token_by_name(token_name="primary_token")
assert repo.get_token_by_name(token_name="primary_token") == Token( assert token is not None
token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", assert token.device_name == "primary_token"
device_name="primary_token", assert token in repo.get_tokens()
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
)
def test_get_token_by_non_existent_name(tokens): def test_get_token_by_non_existent_name(some_tokens_repo):
repo = JsonTokensRepository() repo = some_tokens_repo
with pytest.raises(TokenNotFound): with pytest.raises(TokenNotFound):
assert repo.get_token_by_name(token_name="badname") is None assert repo.get_token_by_name(token_name="badname") is None
def test_get_tokens(tokens): def test_is_token_valid(some_tokens_repo):
repo = JsonTokensRepository() repo = some_tokens_repo
token = repo.get_tokens()[0]
assert repo.get_tokens() == [ assert repo.is_token_valid(token.token)
Token( assert not repo.is_token_valid("gibberish")
token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
device_name="primary_token",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
),
Token(
token="3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
device_name="second_token",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698, tzinfo=timezone.utc),
),
Token(
token="LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
device_name="third_token",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698, tzinfo=timezone.utc),
),
Token(
token="dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
device_name="forth_token",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
),
]
def test_get_tokens_when_one(empty_keys): def test_is_token_name_pair_valid(some_tokens_repo):
repo = JsonTokensRepository() repo = some_tokens_repo
token = repo.get_tokens()[0]
assert repo.get_tokens() == [ assert repo.is_token_name_pair_valid(token.device_name, token.token)
Token( assert not repo.is_token_name_pair_valid(token.device_name, "gibberish")
token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", assert not repo.is_token_name_pair_valid("gibberish", token.token)
device_name="primary_token",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
)
]
def test_create_token(tokens, mock_token_generate): def test_is_token_name_exists(some_tokens_repo):
repo = JsonTokensRepository() repo = some_tokens_repo
token = repo.get_tokens()[0]
assert repo.is_token_name_exists(token.device_name)
assert not repo.is_token_name_exists("gibberish")
def test_get_tokens(some_tokens_repo):
repo = some_tokens_repo
tokenstrings = []
# we cannot insert tokens directly via api, so we check meta-properties instead
for token in repo.get_tokens():
assert len(token.token) == 43  # assuming secrets.token_urlsafe
assert token.token not in tokenstrings
tokenstrings.append(token.token)
assert token.created_at.day == datetime.today().day
def test_create_token(empty_repo, mock_token_generate):
repo = empty_repo
assert repo.create_token(device_name="IamNewDevice") == Token( assert repo.create_token(device_name="IamNewDevice") == Token(
token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM", token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM",
device_name="IamNewDevice", device_name="IamNewDevice",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
) )
assert repo.get_tokens() == [
Token(
def test_delete_token(tokens): token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM",
repo = JsonTokensRepository() device_name="IamNewDevice",
input_token = Token( created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", )
device_name="primary_token",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
)
repo.delete_token(input_token)
assert read_json(tokens / "tokens.json")["tokens"] == [
{
"token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68",
"name": "second_token",
"date": "2022-07-15 17:41:31.675698Z",
},
{
"token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
"name": "third_token",
"date": "2022-07-15T17:41:31.675698Z",
},
{
"token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
"name": "forth_token",
"date": "2022-07-15T17:41:31.675698",
},
] ]
def test_delete_not_found_token(tokens): def test_create_token_existing(some_tokens_repo):
repo = JsonTokensRepository() repo = some_tokens_repo
old_token = repo.get_tokens()[0]
new_token = repo.create_token(device_name=old_token.device_name)
assert new_token.device_name != old_token.device_name
assert old_token in repo.get_tokens()
assert new_token in repo.get_tokens()
def test_delete_token(some_tokens_repo):
repo = some_tokens_repo
original_tokens = repo.get_tokens()
input_token = original_tokens[1]
repo.delete_token(input_token)
tokens_after_delete = repo.get_tokens()
for token in original_tokens:
if token != input_token:
assert token in tokens_after_delete
assert len(original_tokens) == len(tokens_after_delete) + 1
def test_delete_not_found_token(some_tokens_repo):
repo = some_tokens_repo
initial_tokens = repo.get_tokens()
input_token = Token( input_token = Token(
token="imbadtoken", token="imbadtoken",
device_name="primary_token", device_name="primary_token",
@ -274,26 +297,27 @@ def test_delete_not_found_token(tokens):
with pytest.raises(TokenNotFound): with pytest.raises(TokenNotFound):
assert repo.delete_token(input_token) is None assert repo.delete_token(input_token) is None
assert read_json(tokens / "tokens.json")["tokens"] == ORIGINAL_TOKEN_CONTENT new_tokens = repo.get_tokens()
assert len(new_tokens) == len(initial_tokens)
for token in initial_tokens:
assert token in new_tokens
def test_refresh_token(tokens, mock_token_generate): def test_refresh_token(some_tokens_repo):
repo = JsonTokensRepository() repo = some_tokens_repo
input_token = Token( input_token = some_tokens_repo.get_tokens()[0]
token="KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI",
device_name="primary_token",
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
)
assert repo.refresh_token(input_token) == Token( output_token = repo.refresh_token(input_token)
token="ZuLNKtnxDeq6w2dpOJhbB3iat_sJLPTPl_rN5uc5MvM",
device_name="IamNewDevice", assert output_token.token != input_token.token
created_at=datetime(2022, 7, 15, 17, 41, 31, 675698), assert output_token.device_name == input_token.device_name
) assert output_token.created_at == input_token.created_at
assert output_token in repo.get_tokens()
def test_refresh_not_found_token(tokens, mock_token_generate): def test_refresh_not_found_token(some_tokens_repo, mock_token_generate):
repo = JsonTokensRepository() repo = some_tokens_repo
input_token = Token( input_token = Token(
token="idontknowwhoiam", token="idontknowwhoiam",
device_name="tellmewhoiam?", device_name="tellmewhoiam?",
@ -309,39 +333,26 @@ def test_refresh_not_found_token(tokens, mock_token_generate):
################ ################
def test_get_recovery_key(tokens): def test_get_recovery_key_when_empty(empty_repo):
repo = JsonTokensRepository() repo = empty_repo
assert repo.get_recovery_key() == RecoveryKey(
key="ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54",
created_at=datetime(2022, 11, 11, 11, 48, 54, 228038),
expires_at=None,
uses_left=2,
)
def test_get_recovery_key_when_empty(empty_keys):
repo = JsonTokensRepository()
assert repo.get_recovery_key() is None assert repo.get_recovery_key() is None
def test_create_recovery_key(tokens, mock_recovery_key_generate): def test_create_get_recovery_key(some_tokens_repo, mock_recovery_key_generate):
repo = JsonTokensRepository() repo = some_tokens_repo
assert repo.create_recovery_key(uses_left=1, expiration=None) is not None assert repo.create_recovery_key(uses_left=1, expiration=None) is not None
assert read_json(tokens / "tokens.json")["recovery_token"] == { assert repo.get_recovery_key() == RecoveryKey(
"token": "889bf49c1d3199d71a2e704718772bd53a422020334db051", key="889bf49c1d3199d71a2e704718772bd53a422020334db051",
"date": "2022-07-15T17:41:31.675698", created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
"expiration": None, expires_at=None,
"uses_left": 1, uses_left=1,
} )
def test_use_mnemonic_recovery_key_when_empty( def test_use_mnemonic_recovery_key_when_empty(empty_repo):
empty_keys, mock_recovery_key_generate, mock_token_generate repo = empty_repo
):
repo = JsonTokensRepository()
with pytest.raises(RecoveryKeyNotFound): with pytest.raises(RecoveryKeyNotFound):
assert ( assert (
@ -354,9 +365,10 @@ def test_use_mnemonic_recovery_key_when_empty(
def test_use_mnemonic_not_valid_recovery_key( def test_use_mnemonic_not_valid_recovery_key(
tokens, mock_get_recovery_key_return_not_valid some_tokens_repo, mock_recovery_key_generate_invalid
): ):
repo = JsonTokensRepository() repo = some_tokens_repo
assert repo.create_recovery_key(uses_left=0, expiration=None) is not None
with pytest.raises(RecoveryKeyNotFound): with pytest.raises(RecoveryKeyNotFound):
assert ( assert (
@ -368,8 +380,26 @@ def test_use_mnemonic_not_valid_recovery_key(
) )
def test_use_mnemonic_not_mnemonic_recovery_key(tokens): def test_use_mnemonic_expired_recovery_key(
repo = JsonTokensRepository() some_tokens_repo,
):
repo = some_tokens_repo
expiration = datetime.now() - timedelta(minutes=5)
assert repo.create_recovery_key(uses_left=2, expiration=expiration) is not None
recovery_key = repo.get_recovery_key()
assert recovery_key.expires_at == expiration
assert not repo.is_recovery_key_valid()
with pytest.raises(RecoveryKeyNotFound):
token = repo.use_mnemonic_recovery_key(
mnemonic_phrase=mnemonic_from_hex(recovery_key.key),
device_name="newdevice",
)
def test_use_mnemonic_not_mnemonic_recovery_key(some_tokens_repo):
repo = some_tokens_repo
assert repo.create_recovery_key(uses_left=1, expiration=None) is not None
with pytest.raises(InvalidMnemonic): with pytest.raises(InvalidMnemonic):
assert ( assert (
@ -381,8 +411,9 @@ def test_use_mnemonic_not_mnemonic_recovery_key(tokens):
) )
def test_use_not_mnemonic_recovery_key(tokens): def test_use_not_mnemonic_recovery_key(some_tokens_repo):
repo = JsonTokensRepository() repo = some_tokens_repo
assert repo.create_recovery_key(uses_left=1, expiration=None) is not None
with pytest.raises(InvalidMnemonic): with pytest.raises(InvalidMnemonic):
assert ( assert (
@ -394,8 +425,9 @@ def test_use_not_mnemonic_recovery_key(tokens):
) )
def test_use_not_found_mnemonic_recovery_key(tokens): def test_use_not_found_mnemonic_recovery_key(some_tokens_repo):
repo = JsonTokensRepository() repo = some_tokens_repo
assert repo.create_recovery_key(uses_left=1, expiration=None) is not None
with pytest.raises(RecoveryKeyNotFound): with pytest.raises(RecoveryKeyNotFound):
assert ( assert (
@ -407,78 +439,39 @@ def test_use_not_found_mnemonic_recovery_key(tokens):
) )
def test_use_menemonic_recovery_key_when_empty(empty_keys): @pytest.fixture(params=["recovery_uses_1", "recovery_eternal"])
repo = JsonTokensRepository() def recovery_key_uses_left(request):
if request.param == "recovery_uses_1":
with pytest.raises(RecoveryKeyNotFound): return 1
assert ( if request.param == "recovery_eternal":
repo.use_mnemonic_recovery_key( return None
mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb",
device_name="primary_token",
)
is None
)
def test_use_menemonic_recovery_key_when_null(null_keys): def test_use_mnemonic_recovery_key(some_tokens_repo, recovery_key_uses_left):
repo = JsonTokensRepository() repo = some_tokens_repo
assert (
repo.create_recovery_key(uses_left=recovery_key_uses_left, expiration=None)
is not None
)
assert repo.is_recovery_key_valid()
recovery_key = repo.get_recovery_key()
with pytest.raises(RecoveryKeyNotFound): token = repo.use_mnemonic_recovery_key(
assert ( mnemonic_phrase=mnemonic_from_hex(recovery_key.key),
repo.use_mnemonic_recovery_key(
mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb",
device_name="primary_token",
)
is None
)
def test_use_mnemonic_recovery_key(tokens, mock_generate_token):
repo = JsonTokensRepository()
assert repo.use_mnemonic_recovery_key(
mnemonic_phrase="uniform clarify napkin bid dress search input armor police cross salon because myself uphold slice bamboo hungry park",
device_name="newdevice", device_name="newdevice",
) == Token(
token="ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4",
device_name="newdevice",
created_at=datetime(2022, 11, 14, 6, 6, 32, 777123),
) )
assert read_json(tokens / "tokens.json")["tokens"] == [ assert token.device_name == "newdevice"
{ assert token in repo.get_tokens()
"date": "2022-07-15 17:41:31.675698", new_uses = None
"name": "primary_token", if recovery_key_uses_left is not None:
"token": "KG9ni-B-CMPk327Zv1qC7YBQaUGaBUcgdkvMvQ2atFI", new_uses = recovery_key_uses_left - 1
}, assert repo.get_recovery_key() == RecoveryKey(
{ key=recovery_key.key,
"token": "3JKgLOtFu6ZHgE4OU-R-VdW47IKpg-YQL0c6n7bol68", created_at=recovery_key.created_at,
"name": "second_token", expires_at=None,
"date": "2022-07-15 17:41:31.675698Z", uses_left=new_uses,
}, )
{
"token": "LYiwFDekvALKTQSjk7vtMQuNP_6wqKuV-9AyMKytI_8",
"name": "third_token",
"date": "2022-07-15T17:41:31.675698Z",
},
{
"token": "dD3CFPcEZvapscgzWb7JZTLog7OMkP7NzJeu2fAazXM",
"name": "forth_token",
"date": "2022-07-15T17:41:31.675698",
},
{
"date": "2022-11-14T06:06:32.777123",
"name": "newdevice",
"token": "ur71mC4aiI6FIYAN--cTL-38rPHS5D6NuB1bgN_qKF4",
},
]
assert read_json(tokens / "tokens.json")["recovery_token"] == {
"date": "2022-11-11T11:48:54.228038",
"expiration": None,
"token": "ed653e4b8b042b841d285fa7a682fa09e925ddb2d8906f54",
"uses_left": 1,
}
################## ##################
@ -486,35 +479,31 @@ def test_use_mnemonic_recovery_key(tokens, mock_generate_token):
################## ##################
def test_get_new_device_key(tokens, mock_new_device_key_generate): def test_get_new_device_key(some_tokens_repo, mock_new_device_key_generate):
repo = JsonTokensRepository() repo = some_tokens_repo
assert repo.get_new_device_key() is not None assert repo.get_new_device_key() == NewDeviceKey(
assert read_json(tokens / "tokens.json")["new_device"] == { key="43478d05b35e4781598acd76e33832bb",
"date": "2022-07-15T17:41:31.675698", created_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
"expiration": "2022-07-15T17:41:31.675698", expires_at=datetime(2022, 7, 15, 17, 41, 31, 675698),
"token": "43478d05b35e4781598acd76e33832bb", )
}
def test_delete_new_device_key(tokens): def test_delete_new_device_key(some_tokens_repo):
repo = JsonTokensRepository() repo = some_tokens_repo
assert repo.delete_new_device_key() is None assert repo.delete_new_device_key() is None
assert "new_device" not in read_json(tokens / "tokens.json") # we cannot say if there is ot not without creating it?
def test_delete_new_device_key_when_empty(empty_keys): def test_delete_new_device_key_when_empty(empty_repo):
repo = JsonTokensRepository() repo = empty_repo
repo.delete_new_device_key() assert repo.delete_new_device_key() is None
assert "new_device" not in read_json(empty_keys / "empty_keys.json")
def test_use_invalid_mnemonic_new_device_key( def test_use_invalid_mnemonic_new_device_key(some_tokens_repo):
tokens, mock_new_device_key_generate, datadir, mock_token_generate repo = some_tokens_repo
):
repo = JsonTokensRepository()
with pytest.raises(InvalidMnemonic): with pytest.raises(InvalidMnemonic):
assert ( assert (
@ -527,9 +516,10 @@ def test_use_invalid_mnemonic_new_device_key(
def test_use_not_exists_mnemonic_new_device_key( def test_use_not_exists_mnemonic_new_device_key(
tokens, mock_new_device_key_generate, mock_token_generate empty_repo, mock_new_device_key_generate
): ):
repo = JsonTokensRepository() repo = empty_repo
assert repo.get_new_device_key() is not None
with pytest.raises(NewDeviceKeyNotFound): with pytest.raises(NewDeviceKeyNotFound):
assert ( assert (
@ -541,36 +531,54 @@ def test_use_not_exists_mnemonic_new_device_key(
) )
def test_use_mnemonic_new_device_key( def test_use_mnemonic_new_device_key(empty_repo):
tokens, mock_new_device_key_generate, mock_token_generate repo = empty_repo
): key = repo.get_new_device_key()
repo = JsonTokensRepository() assert key is not None
assert ( mnemonic_phrase = mnemonic_from_hex(key.key)
repo.use_mnemonic_new_device_key(
device_name="imnew", new_token = repo.use_mnemonic_new_device_key(
mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", device_name="imnew",
) mnemonic_phrase=mnemonic_phrase,
is not None
) )
# assert read_json(datadir / "tokens.json")["new_device"] == []
assert new_token.device_name == "imnew"
assert new_token in repo.get_tokens()
# we must delete the key after use
with pytest.raises(NewDeviceKeyNotFound):
assert (
repo.use_mnemonic_new_device_key(
device_name="imnew",
mnemonic_phrase=mnemonic_phrase,
)
is None
)
def test_use_mnemonic_new_device_key_when_empty(empty_keys): def test_use_mnemonic_expired_new_device_key(
repo = JsonTokensRepository() some_tokens_repo,
):
with pytest.raises(NewDeviceKeyNotFound): repo = some_tokens_repo
assert ( expiration = datetime.now() - timedelta(minutes=5)
repo.use_mnemonic_new_device_key(
device_name="imnew", key = repo.get_new_device_key()
mnemonic_phrase="captain ribbon toddler settle symbol minute step broccoli bless universe divide bulb", assert key is not None
) assert key.expires_at is not None
is None key.expires_at = expiration
) assert not key.is_valid()
repo._store_new_device_key(key)
def test_use_mnemonic_new_device_key_when_null(null_keys): with pytest.raises(NewDeviceKeyNotFound):
repo = JsonTokensRepository() token = repo.use_mnemonic_new_device_key(
mnemonic_phrase=mnemonic_from_hex(key.key),
device_name="imnew",
)
def test_use_mnemonic_new_device_key_when_empty(empty_repo):
repo = empty_repo
with pytest.raises(NewDeviceKeyNotFound): with pytest.raises(NewDeviceKeyNotFound):
assert ( assert (
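The rewritten tests above swap direct tokens.json inspection for repository fixtures (empty_repo, some_tokens_repo), and the new recovery_key_uses_left fixture is parametrized, so every test that requests it runs once per use-count variant. A minimal, self-contained sketch of that pytest pattern (illustrative names, not from this commit):

    import pytest

    @pytest.fixture(params=[1, None])
    def uses_left(request):
        # Each test requesting this fixture runs twice: once with a
        # finite use count, once with an "eternal" (None) key.
        return request.param

    def test_decrement(uses_left):
        new_uses = None if uses_left is None else uses_left - 1
        assert new_uses in (0, None)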
@@ -1,9 +1,4 @@
 {
-    "backblaze": {
-        "accountId": "ID",
-        "accountKey": "KEY",
-        "bucket": "selfprivacy"
-    },
     "api": {
         "token": "TEST_TOKEN",
         "enableSwagger": false
@@ -11,9 +6,6 @@
     "bitwarden": {
         "enable": false
     },
-    "cloudflare": {
-        "apiKey": "TOKEN"
-    },
     "databasePassword": "PASSWORD",
     "domain": "test.tld",
     "hashedMasterPassword": "HASHED_PASSWORD",
@@ -67,5 +59,18 @@
             "username": "user3",
             "hashedPassword": "HASHED_PASSWORD_3"
         }
-    ]
+    ],
+    "dns": {
+        "provider": "CLOUDFLARE",
+        "apiKey": "TOKEN"
+    },
+    "server": {
+        "provider": "HETZNER"
+    },
+    "backup": {
+        "provider": "BACKBLAZE",
+        "accountId": "ID",
+        "accountKey": "KEY",
+        "bucket": "selfprivacy"
+    }
 }
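This and the following fixture files all receive the same mechanical change: the legacy top-level "backblaze" and "cloudflare" blocks move under provider-tagged "backup" and "dns" sections, and a "server" section is added. A hypothetical sketch of that reshaping, for orientation only (not the repository's actual migration code):

    def migrate_providers(userdata: dict) -> dict:
        # Move legacy blocks under provider-tagged sections.
        data = dict(userdata)
        if "backblaze" in data:
            data["backup"] = {"provider": "BACKBLAZE", **data.pop("backblaze")}
        if "cloudflare" in data:
            data["dns"] = {"provider": "CLOUDFLARE", **data.pop("cloudflare")}
        data.setdefault("server", {"provider": "HETZNER"})
        return data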
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": true "enable": true
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -46,5 +38,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": true "enable": true
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": true "enable": true
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -51,5 +43,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": true "enable": true
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -43,5 +35,18 @@
}, },
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -50,5 +42,18 @@
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
], ],
"users": [ "users": [
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -57,5 +49,18 @@
"ssh-rsa KEY user1@pc" "ssh-rsa KEY user1@pc"
] ]
} }
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -67,5 +59,18 @@
"username": "user3", "username": "user3",
"hashedPassword": "HASHED_PASSWORD_3" "hashedPassword": "HASHED_PASSWORD_3"
} }
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@@ -1,16 +1,96 @@
 # pylint: disable=redefined-outer-name
 # pylint: disable=unused-argument
-import json
 import pytest
-from selfprivacy_api.utils import WriteUserData, ReadUserData
 from selfprivacy_api.jobs import Jobs, JobStatus
+import selfprivacy_api.jobs as jobsmodule


-def test_jobs(authorized_client, jobs_file, shared_datadir):
-    jobs = Jobs()
+def test_add_reset(jobs_with_one_job):
+    jobs_with_one_job.reset()
+    assert jobs_with_one_job.get_jobs() == []
+
+
+def test_minimal_update(jobs_with_one_job):
+    jobs = jobs_with_one_job
+    test_job = jobs_with_one_job.get_jobs()[0]
+
+    jobs.update(job=test_job, status=JobStatus.ERROR)
+
+    assert jobs.get_jobs() == [test_job]
+
+
+def test_remove_by_uid(jobs_with_one_job):
+    test_job = jobs_with_one_job.get_jobs()[0]
+    uid_str = str(test_job.uid)
+
+    assert jobs_with_one_job.remove_by_uid(uid_str)
+    assert jobs_with_one_job.get_jobs() == []
+    assert not jobs_with_one_job.remove_by_uid(uid_str)
+
+
+def test_remove_update_nonexistent(jobs_with_one_job):
+    test_job = jobs_with_one_job.get_jobs()[0]
+
+    jobs_with_one_job.remove(test_job)
+    assert jobs_with_one_job.get_jobs() == []
+
+    result = jobs_with_one_job.update(job=test_job, status=JobStatus.ERROR)
+    assert result == test_job  # even though we might consider changing this behavior
+
+
+def test_remove_get_nonexistent(jobs_with_one_job):
+    test_job = jobs_with_one_job.get_jobs()[0]
+    uid_str = str(test_job.uid)
+    assert jobs_with_one_job.get_job(uid_str) == test_job
+
+    jobs_with_one_job.remove(test_job)
+
+    assert jobs_with_one_job.get_job(uid_str) is None
+
+
+def test_jobs(jobs_with_one_job):
+    jobs = jobs_with_one_job
+    test_job = jobs_with_one_job.get_jobs()[0]
+    assert not jobs.is_busy()
+
+    jobs.update(
+        job=test_job,
+        name="Write Tests",
+        description="An oddly satisfying experience",
+        status=JobStatus.RUNNING,
+        status_text="Status text",
+        progress=50,
+    )
+
+    assert jobs.get_jobs() == [test_job]
+    assert jobs.is_busy()
+
+    backup = jobsmodule.JOB_EXPIRATION_SECONDS
+    jobsmodule.JOB_EXPIRATION_SECONDS = 0
+
+    jobs.update(
+        job=test_job,
+        status=JobStatus.FINISHED,
+        status_text="Yaaay!",
+        progress=100,
+    )

     assert jobs.get_jobs() == []
+    jobsmodule.JOB_EXPIRATION_SECONDS = backup
+
+
+@pytest.fixture
+def jobs():
+    j = Jobs()
+    j.reset()
+    assert j.get_jobs() == []
+    yield j
+    j.reset()
+
+
+@pytest.fixture
+def jobs_with_one_job(jobs):
     test_job = jobs.add(
         type_id="test",
         name="Test job",
@@ -19,32 +99,5 @@ def test_jobs(authorized_client, jobs_file, shared_datadir):
         status_text="Status text",
         progress=0,
     )
     assert jobs.get_jobs() == [test_job]
+    return jobs

-    jobs.update(
-        job=test_job,
-        status=JobStatus.RUNNING,
-        status_text="Status text",
-        progress=50,
-    )
-
-    assert jobs.get_jobs() == [test_job]
-
-
-@pytest.fixture
-def mock_subprocess_run(mocker):
-    mock = mocker.patch("subprocess.run", autospec=True)
-    return mock
-
-
-@pytest.fixture
-def mock_shutil_move(mocker):
-    mock = mocker.patch("shutil.move", autospec=True)
-    return mock
-
-
-@pytest.fixture
-def mock_shutil_chown(mocker):
-    mock = mocker.patch("shutil.chown", autospec=True)
-    return mock
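test_jobs above shortens job retention by saving, zeroing, and restoring jobsmodule.JOB_EXPIRATION_SECONDS by hand. An equivalent sketch using pytest's monkeypatch fixture, which restores the constant even if an assertion fails mid-test (an alternative style, not what this commit does):

    import selfprivacy_api.jobs as jobsmodule
    from selfprivacy_api.jobs import JobStatus

    def test_finished_job_expires_immediately(jobs_with_one_job, monkeypatch):
        # monkeypatch undoes this attribute change during teardown.
        monkeypatch.setattr(jobsmodule, "JOB_EXPIRATION_SECONDS", 0)
        test_job = jobs_with_one_job.get_jobs()[0]
        jobs_with_one_job.update(job=test_job, status=JobStatus.FINISHED)
        assert jobs_with_one_job.get_jobs() == []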
tests/test_models.py (new file, 18 lines)
@@ -0,0 +1,18 @@
+import pytest
+from datetime import datetime, timedelta
+
+from selfprivacy_api.models.tokens.recovery_key import RecoveryKey
+from selfprivacy_api.models.tokens.new_device_key import NewDeviceKey
+
+
+def test_recovery_key_expired():
+    expiration = datetime.now() - timedelta(minutes=5)
+    key = RecoveryKey.generate(expiration=expiration, uses_left=2)
+    assert not key.is_valid()
+
+
+def test_new_device_key_expired():
+    expiration = datetime.now() - timedelta(minutes=5)
+    key = NewDeviceKey.generate()
+    key.expires_at = expiration
+    assert not key.is_valid()
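Both tests reduce to the same validity rule: a key with no expiry never expires, otherwise it must expire in the future. A standalone sketch of that check (the models' actual is_valid() may differ in detail):

    from datetime import datetime, timedelta
    from typing import Optional

    def is_valid(expires_at: Optional[datetime]) -> bool:
        # No expiry means the key is eternal.
        return expires_at is None or expires_at > datetime.now()

    assert is_valid(None)
    assert not is_valid(datetime.now() - timedelta(minutes=5))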
@@ -1,18 +1,10 @@
 {
-    "backblaze": {
-        "accountId": "ID",
-        "accountKey": "KEY",
-        "bucket": "selfprivacy"
-    },
     "api": {
         "token": "TEST_TOKEN",
         "enableSwagger": false
     },
     "bitwarden": {
     },
-    "cloudflare": {
-        "apiKey": "TOKEN"
-    },
     "databasePassword": "PASSWORD",
     "domain": "test.tld",
     "hashedMasterPassword": "HASHED_PASSWORD",
@@ -47,5 +39,18 @@
     "timezone": "Europe/Moscow",
     "sshKeys": [
         "ssh-rsa KEY test@pc"
-    ]
+    ],
+    "dns": {
+        "provider": "CLOUDFLARE",
+        "apiKey": "TOKEN"
+    },
+    "server": {
+        "provider": "HETZNER"
+    },
+    "backup": {
+        "provider": "BACKBLAZE",
+        "accountId": "ID",
+        "accountKey": "KEY",
+        "bucket": "selfprivacy"
+    }
 }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": true "enable": true
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@@ -1,16 +1,8 @@
 {
-    "backblaze": {
-        "accountId": "ID",
-        "accountKey": "KEY",
-        "bucket": "selfprivacy"
-    },
     "api": {
         "token": "TEST_TOKEN",
         "enableSwagger": false
     },
-    "cloudflare": {
-        "apiKey": "TOKEN"
-    },
     "databasePassword": "PASSWORD",
     "domain": "test.tld",
     "hashedMasterPassword": "HASHED_PASSWORD",
@@ -45,5 +37,18 @@
     "timezone": "Europe/Moscow",
     "sshKeys": [
         "ssh-rsa KEY test@pc"
-    ]
+    ],
+    "dns": {
+        "provider": "CLOUDFLARE",
+        "apiKey": "TOKEN"
+    },
+    "server": {
+        "provider": "HETZNER"
+    },
+    "backup": {
+        "provider": "BACKBLAZE",
+        "accountId": "ID",
+        "accountKey": "KEY",
+        "bucket": "selfprivacy"
+    }
 }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -47,5 +39,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -45,5 +37,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@@ -2,6 +2,8 @@ import base64
 import json
 import pytest

+from selfprivacy_api.utils import get_dkim_key
+

 ###############################################################################
@@ -13,7 +15,10 @@ class ProcessMock:
         self.kwargs = kwargs

     def communicate():
-        return (b"I am a DKIM key", None)
+        return (
+            b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector for example.com\n',
+            None,
+        )


 class NoFileMock(ProcessMock):
@@ -63,11 +68,27 @@ def test_illegal_methods(authorized_client, mock_subproccess_popen):
     assert response.status_code == 405


-def test_dkim_key(authorized_client, mock_subproccess_popen):
+def test_get_dkim_key(mock_subproccess_popen):
     """Test DKIM key"""
+    dkim_key = get_dkim_key("example.com")
+    assert (
+        dkim_key
+        == "v=DKIM1; k=rsa; p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB"
+    )
+    assert mock_subproccess_popen.call_args[0][0] == [
+        "cat",
+        "/var/dkim/example.com.selector.txt",
+    ]
+
+
+def test_dkim_key(authorized_client, mock_subproccess_popen):
+    """Test old REST DKIM key endpoint"""
     response = authorized_client.get("/services/mailserver/dkim")
     assert response.status_code == 200
-    assert base64.b64decode(response.text) == b"I am a DKIM key"
+    assert (
+        base64.b64decode(response.text)
+        == b'selector._domainkey\tIN\tTXT\t( "v=DKIM1; k=rsa; "\n\t "p=MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDNn/IhEz1SxgHxxxI8vlPYC2dNueiLe1GC4SYz8uHimC8SDkMvAwm7rqi2SimbFgGB5nccCNOqCkrIqJTCB9vufqBnVKAjshHqpOr5hk4JJ1T/AGQKWinstmDbfTLPYTbU8ijZrwwGeqQLlnXR5nSN0GB9GazheA9zaPsT6PV+aQIDAQAB" ) ; ----- DKIM key selector for example.com\n'
+    )
     assert mock_subproccess_popen.call_args[0][0] == [
         "cat",
         "/var/dkim/example.com.selector.txt",
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -47,5 +39,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@@ -1,16 +1,8 @@
 {
-    "backblaze": {
-        "accountId": "ID",
-        "accountKey": "KEY",
-        "bucket": "selfprivacy"
-    },
     "api": {
         "token": "TEST_TOKEN",
         "enableSwagger": false
     },
-    "cloudflare": {
-        "apiKey": "TOKEN"
-    },
     "databasePassword": "PASSWORD",
     "domain": "test.tld",
     "hashedMasterPassword": "HASHED_PASSWORD",
@@ -40,5 +32,18 @@
     "timezone": "Europe/Moscow",
     "sshKeys": [
         "ssh-rsa KEY test@pc"
-    ]
+    ],
+    "dns": {
+        "provider": "CLOUDFLARE",
+        "apiKey": "TOKEN"
+    },
+    "server": {
+        "provider": "HETZNER"
+    },
+    "backup": {
+        "provider": "BACKBLAZE",
+        "accountId": "ID",
+        "accountKey": "KEY",
+        "bucket": "selfprivacy"
+    }
 }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -47,5 +39,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -45,5 +37,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -47,5 +39,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -45,5 +37,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@@ -161,7 +161,7 @@ def mock_restic_tasks(mocker):
 @pytest.fixture
 def undefined_settings(mocker, datadir):
     mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "undefined.json")
-    assert "backblaze" not in read_json(datadir / "undefined.json")
+    assert "backup" not in read_json(datadir / "undefined.json")
     return datadir
@@ -170,20 +170,22 @@ def some_settings(mocker, datadir):
     mocker.patch(
         "selfprivacy_api.utils.USERDATA_FILE", new=datadir / "some_values.json"
     )
-    assert "backblaze" in read_json(datadir / "some_values.json")
-    assert read_json(datadir / "some_values.json")["backblaze"]["accountId"] == "ID"
-    assert read_json(datadir / "some_values.json")["backblaze"]["accountKey"] == "KEY"
-    assert read_json(datadir / "some_values.json")["backblaze"]["bucket"] == "BUCKET"
+    assert "backup" in read_json(datadir / "some_values.json")
+    assert read_json(datadir / "some_values.json")["backup"]["provider"] == "BACKBLAZE"
+    assert read_json(datadir / "some_values.json")["backup"]["accountId"] == "ID"
+    assert read_json(datadir / "some_values.json")["backup"]["accountKey"] == "KEY"
+    assert read_json(datadir / "some_values.json")["backup"]["bucket"] == "BUCKET"
     return datadir


 @pytest.fixture
 def no_values(mocker, datadir):
     mocker.patch("selfprivacy_api.utils.USERDATA_FILE", new=datadir / "no_values.json")
-    assert "backblaze" in read_json(datadir / "no_values.json")
-    assert "accountId" not in read_json(datadir / "no_values.json")["backblaze"]
-    assert "accountKey" not in read_json(datadir / "no_values.json")["backblaze"]
-    assert "bucket" not in read_json(datadir / "no_values.json")["backblaze"]
+    assert "backup" in read_json(datadir / "no_values.json")
+    assert "provider" not in read_json(datadir / "no_values.json")["backup"]
+    assert "accountId" not in read_json(datadir / "no_values.json")["backup"]
+    assert "accountKey" not in read_json(datadir / "no_values.json")["backup"]
+    assert "bucket" not in read_json(datadir / "no_values.json")["backup"]
     return datadir
@@ -462,7 +464,8 @@ def test_set_backblaze_config(
     )
     assert response.status_code == 200
     assert mock_restic_tasks.update_keys_from_userdata.call_count == 1
-    assert read_json(some_settings / "some_values.json")["backblaze"] == {
+    assert read_json(some_settings / "some_values.json")["backup"] == {
+        "provider": "BACKBLAZE",
         "accountId": "123",
         "accountKey": "456",
         "bucket": "789",
@@ -478,7 +481,8 @@ def test_set_backblaze_config_on_undefined(
     )
     assert response.status_code == 200
     assert mock_restic_tasks.update_keys_from_userdata.call_count == 1
-    assert read_json(undefined_settings / "undefined.json")["backblaze"] == {
+    assert read_json(undefined_settings / "undefined.json")["backup"] == {
+        "provider": "BACKBLAZE",
         "accountId": "123",
         "accountKey": "456",
         "bucket": "789",
@@ -494,7 +498,8 @@ def test_set_backblaze_config_on_no_values(
     )
     assert response.status_code == 200
     assert mock_restic_tasks.update_keys_from_userdata.call_count == 1
-    assert read_json(no_values / "no_values.json")["backblaze"] == {
+    assert read_json(no_values / "no_values.json")["backup"] == {
+        "provider": "BACKBLAZE",
         "accountId": "123",
         "accountKey": "456",
         "bucket": "789",
@@ -1,6 +1,4 @@
 {
-    "backblaze": {
-    },
     "api": {
         "token": "TEST_TOKEN",
         "enableSwagger": false
@@ -8,9 +6,6 @@
     "bitwarden": {
         "enable": false
     },
-    "cloudflare": {
-        "apiKey": "TOKEN"
-    },
     "databasePassword": "PASSWORD",
     "domain": "test.tld",
     "hashedMasterPassword": "HASHED_PASSWORD",
@@ -64,5 +59,14 @@
             "username": "user3",
             "hashedPassword": "HASHED_PASSWORD_3"
         }
-    ]
+    ],
+    "dns": {
+        "provider": "CLOUDFLARE",
+        "apiKey": "TOKEN"
+    },
+    "server": {
+        "provider": "HETZNER"
+    },
+    "backup": {
+    }
 }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "BUCKET"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -67,5 +59,18 @@
"username": "user3", "username": "user3",
"hashedPassword": "HASHED_PASSWORD_3" "hashedPassword": "HASHED_PASSWORD_3"
} }
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "BUCKET"
}
} }
@@ -6,9 +6,6 @@
     "bitwarden": {
         "enable": false
     },
-    "cloudflare": {
-        "apiKey": "TOKEN"
-    },
     "databasePassword": "PASSWORD",
     "domain": "test.tld",
     "hashedMasterPassword": "HASHED_PASSWORD",
@@ -62,5 +59,12 @@
             "username": "user3",
             "hashedPassword": "HASHED_PASSWORD_3"
         }
-    ]
+    ],
+    "dns": {
+        "provider": "CLOUDFLARE",
+        "apiKey": "TOKEN"
+    },
+    "server": {
+        "provider": "HETZNER"
+    }
 }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -67,5 +59,18 @@
"username": "user3", "username": "user3",
"hashedPassword": "HASHED_PASSWORD_3" "hashedPassword": "HASHED_PASSWORD_3"
} }
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -42,5 +34,18 @@
"enable": true, "enable": true,
"allowReboot": true "allowReboot": true
}, },
"timezone": "Europe/Moscow" "timezone": "Europe/Moscow",
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -42,5 +34,18 @@
"enable": true, "enable": true,
"allowReboot": true "allowReboot": true
}, },
"timezone": "Europe/Moscow" "timezone": "Europe/Moscow",
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -38,5 +30,18 @@
"enable": true, "enable": true,
"allowReboot": true "allowReboot": true
}, },
"timezone": "Europe/Moscow" "timezone": "Europe/Moscow",
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": false "enable": false
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -42,5 +34,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@@ -5,6 +5,12 @@ import datetime
 import pytest
 from mnemonic import Mnemonic

+from selfprivacy_api.repositories.tokens.json_tokens_repository import (
+    JsonTokensRepository,
+)
+
+TOKEN_REPO = JsonTokensRepository()
+
 from tests.common import read_json, write_json
@@ -97,7 +103,7 @@ def test_refresh_token(authorized_client, tokens_file):
     response = authorized_client.post("/auth/tokens")
     assert response.status_code == 200
     new_token = response.json()["token"]
-    assert read_json(tokens_file)["tokens"][0]["token"] == new_token
+    assert TOKEN_REPO.get_token_by_token_string(new_token) is not None


 # new device
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": true "enable": true
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -46,5 +38,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": true "enable": true
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
@ -1,9 +1,4 @@
{ {
"backblaze": {
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
},
"api": { "api": {
"token": "TEST_TOKEN", "token": "TEST_TOKEN",
"enableSwagger": false "enableSwagger": false
@ -11,9 +6,6 @@
"bitwarden": { "bitwarden": {
"enable": true "enable": true
}, },
"cloudflare": {
"apiKey": "TOKEN"
},
"databasePassword": "PASSWORD", "databasePassword": "PASSWORD",
"domain": "test.tld", "domain": "test.tld",
"hashedMasterPassword": "HASHED_PASSWORD", "hashedMasterPassword": "HASHED_PASSWORD",
@ -48,5 +40,18 @@
"timezone": "Europe/Moscow", "timezone": "Europe/Moscow",
"sshKeys": [ "sshKeys": [
"ssh-rsa KEY test@pc" "ssh-rsa KEY test@pc"
] ],
"dns": {
"provider": "CLOUDFLARE",
"apiKey": "TOKEN"
},
"server": {
"provider": "HETZNER"
},
"backup": {
"provider": "BACKBLAZE",
"accountId": "ID",
"accountKey": "KEY",
"bucket": "selfprivacy"
}
} }
Some files were not shown because too many files have changed in this diff.