feat: Use proper logging (#154)

Reviewed-on: https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api/pulls/154
Reviewed-by: Inex Code <inex.code@selfprivacy.org>
Co-authored-by: dettlaff <dettlaff@riseup.net>
Co-committed-by: dettlaff <dettlaff@riseup.net>
Author: dettlaff, 2024-10-23 14:38:01 +03:00 (committed by Inex Code)
Parent: 03d751e591
Commit: 848befe3f1
8 changed files with 48 additions and 17 deletions
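At a glance, the change replaces ad-hoc print() calls with the standard logging module: the API entrypoint configures the root logger once, driven by a LOG_LEVEL environment variable, and each touched module obtains its own logger via logging.getLogger(__name__). Below is a minimal, self-contained sketch of that pattern, assembled from the hunks that follow rather than copied verbatim from any one file (the final error message is placeholder text, not from the repository):

import logging
import os

# Root logger setup, done once at startup; unknown LOG_LEVEL values fall back to INFO.
log_level = os.getenv("LOG_LEVEL", "INFO").upper()
logging.basicConfig(
    level=getattr(logging, log_level, logging.INFO),
    format="%(levelname)s: %(message)s",
)

# Per-module logger, as added at the top of each changed module.
logger = logging.getLogger(__name__)

# Error reporting that previously went through print().
logging.error("Error creating data path: %s", "example error")  # -> ERROR: Error creating data path: example error

Note that the hunks call logging.error() on the root logger rather than on the per-module logger; both end up on the handler installed by basicConfig(), since module loggers propagate to the root by default.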


@@ -1,5 +1,8 @@
 #!/usr/bin/env python3
 """SelfPrivacy server management API"""
+import logging
+import os
+
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 from strawberry.fastapi import GraphQLRouter
@@ -12,6 +15,12 @@ from selfprivacy_api.graphql.schema import schema
 from selfprivacy_api.migrations import run_migrations
 
+log_level = os.getenv("LOG_LEVEL", "INFO").upper()
+logging.basicConfig(
+    level=getattr(logging, log_level, logging.INFO), format="%(levelname)s: %(message)s"
+)
+
 app = FastAPI()
 
 graphql_app: GraphQLRouter = GraphQLRouter(
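One behavior worth noting in the hunk above: getattr(logging, log_level, logging.INFO) means a misspelled or unsupported LOG_LEVEL value silently falls back to INFO rather than raising. A standalone two-line check (not from the repository):

import logging
print(getattr(logging, "VERBOSE", logging.INFO))  # no VERBOSE level in the stdlib -> 20 (INFO)
print(getattr(logging, "DEBUG", logging.INFO))    # -> 10 (DEBUG)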


@@ -2,6 +2,7 @@
 # pylint: disable=too-few-public-methods
 import typing
 import strawberry
 from selfprivacy_api.graphql.common_types.service import (


@@ -3,6 +3,7 @@
 import subprocess
 import pathlib
 import shutil
+import logging
 from pydantic import BaseModel
 from selfprivacy_api.jobs import Job, JobStatus, Jobs
@@ -15,6 +16,8 @@ from selfprivacy_api.utils import ReadUserData, WriteUserData
 from selfprivacy_api.utils.huey import huey
 from selfprivacy_api.utils.block_devices import BlockDevices
 
+logger = logging.getLogger(__name__)
+
 
 class BindMigrationConfig(BaseModel):
     """Config for bind migration.
@@ -69,7 +72,7 @@ def move_folder(
     try:
         data_path.mkdir(mode=0o750, parents=True, exist_ok=True)
     except Exception as error:
-        print(f"Error creating data path: {error}")
+        logging.error(f"Error creating data path: {error}")
         return
 
     try:
@@ -81,12 +84,12 @@ def move_folder(
     try:
         subprocess.run(["mount", "--bind", str(bind_path), str(data_path)], check=True)
     except subprocess.CalledProcessError as error:
-        print(error)
+        logging.error(error)
 
     try:
         subprocess.run(["chown", "-R", f"{user}:{group}", str(data_path)], check=True)
     except subprocess.CalledProcessError as error:
-        print(error)
+        logging.error(error)
 
 
 @huey.task()


@@ -9,6 +9,8 @@ with IDs of the migrations to skip.
 Adding DISABLE_ALL to that array disables the migrations module entirely.
 """
+import logging
+
 from selfprivacy_api.utils import ReadUserData, UserDataFiles
 from selfprivacy_api.migrations.write_token_to_redis import WriteTokenToRedis
 from selfprivacy_api.migrations.check_for_system_rebuild_jobs import (
@@ -17,6 +19,8 @@ from selfprivacy_api.migrations.check_for_system_rebuild_jobs import (
 from selfprivacy_api.migrations.add_roundcube import AddRoundcube
 from selfprivacy_api.migrations.add_monitoring import AddMonitoring
 
+logger = logging.getLogger(__name__)
+
 migrations = [
     WriteTokenToRedis(),
     CheckForSystemRebuildJobs(),
@@ -47,6 +51,6 @@ def run_migrations():
             if migration.is_migration_needed():
                 migration.migrate()
         except Exception as err:
-            print(f"Error while migrating {migration.get_migration_name()}")
-            print(err)
-            print("Skipping this migration")
+            logging.error(f"Error while migrating {migration.get_migration_name()}")
+            logging.error(err)
+            logging.error("Skipping this migration")


@@ -1,3 +1,4 @@
+import logging
 from datetime import datetime
 from typing import Optional
 from selfprivacy_api.migrations.migration import Migration
@@ -11,6 +12,8 @@ from selfprivacy_api.repositories.tokens.abstract_tokens_repository import (
 )
 from selfprivacy_api.utils import ReadUserData, UserDataFiles
 
+logger = logging.getLogger(__name__)
+
 
 class WriteTokenToRedis(Migration):
     """Load Json tokens into Redis"""
@@ -35,7 +38,7 @@ class WriteTokenToRedis(Migration):
                 created_at=datetime.now(),
             )
         except Exception as e:
-            print(e)
+            logging.error(e)
             return None
 
     def is_migration_needed(self) -> bool:
@@ -45,7 +48,7 @@ class WriteTokenToRedis(Migration):
             ):
                 return True
         except Exception as e:
-            print(e)
+            logging.error(e)
             return False
         return False
@@ -54,11 +57,11 @@ class WriteTokenToRedis(Migration):
         try:
             token = self.get_token_from_json()
             if token is None:
-                print("No token found in secrets.json")
+                logging.error("No token found in secrets.json")
                 return
             RedisTokensRepository()._store_token(token)
-            print("Done")
+            logging.error("Done")
         except Exception as e:
-            print(e)
-            print("Error migrating access tokens from json to redis")
+            logging.error(e)
+            logging.error("Error migrating access tokens from json to redis")


@@ -1,5 +1,6 @@
 """Services module."""
+import logging
 import base64
 import typing
 from typing import List
@@ -30,6 +31,8 @@ from selfprivacy_api.utils import read_account_uri
 CONFIG_STASH_DIR = "/etc/selfprivacy/dump"
 
+logger = logging.getLogger(__name__)
+
 
 class ServiceManager(Service):
     folders: List[str] = [CONFIG_STASH_DIR]
@@ -76,7 +79,7 @@ class ServiceManager(Service):
                 )
             )
         except Exception as e:
-            print(f"Error creating CAA: {e}")
+            logging.error(f"Error creating CAA: {e}")
 
         for service in ServiceManager.get_enabled_services():
             dns_records += service.get_dns_records(ip4, ip6)


@@ -1,6 +1,9 @@
 """Generic size counter using pathlib"""
 import pathlib
+import logging
+
+logger = logging.getLogger(__name__)
 
 
 def get_storage_usage(path: str) -> int:
@@ -18,5 +21,5 @@ def get_storage_usage(path: str) -> int:
     except FileNotFoundError:
         pass
     except Exception as error:
-        print(error)
+        logging.error(error)
     return storage_usage


@@ -1,11 +1,16 @@
 from __future__ import annotations
+import logging
 import subprocess
 import pathlib
-from pydantic import BaseModel
 from os.path import exists
+from pydantic import BaseModel
 
 from selfprivacy_api.utils.block_devices import BlockDevice, BlockDevices
 
+logger = logging.getLogger(__name__)
+
 # tests override it to a tmpdir
 VOLUMES_PATH = "/volumes"
@@ -87,7 +92,7 @@ class Bind:
                 check=True,
             )
         except subprocess.CalledProcessError as error:
-            print(error.stderr)
+            logging.error(error.stderr)
             raise BindError(f"Unable to bind {source} to {target} :{error.stderr}")
 
     def unbind(self) -> None:
@@ -119,7 +124,7 @@ class Bind:
                 stderr=subprocess.PIPE,
             )
         except subprocess.CalledProcessError as error:
-            print(error.stderr)
+            logging.error(error.stderr)
             error_message = (
                 f"Unable to set ownership of {true_location} :{error.stderr}"
             )