fix: Static root for PostgreSQL dumps

This commit is contained in:
Inex Code 2024-12-23 21:41:24 +03:00
parent d5a3fb3928
commit a813f71fd3
No known key found for this signature in database
2 changed files with 33 additions and 15 deletions

View file

@ -4,6 +4,7 @@ import subprocess
import json import json
import datetime import datetime
import tempfile import tempfile
import logging
from typing import List, Optional, TypeVar, Callable from typing import List, Optional, TypeVar, Callable
from collections.abc import Iterable from collections.abc import Iterable
@ -28,6 +29,8 @@ FILESYSTEM_TIMEOUT_SEC = 60
T = TypeVar("T", bound=Callable) T = TypeVar("T", bound=Callable)
logger = logging.getLogger(__name__)
def unlocked_repo(func: T) -> T: def unlocked_repo(func: T) -> T:
"""unlock repo and retry if it appears to be locked""" """unlock repo and retry if it appears to be locked"""
@ -219,6 +222,10 @@ class ResticBackupper(AbstractBackupper):
tags=tags, tags=tags,
) )
logger.info(
"Starting backup: " + " ".join(self._censor_command(backup_command))
)
try: try:
messages = ResticBackupper._run_backup_command(backup_command, job) messages = ResticBackupper._run_backup_command(backup_command, job)

View file

@ -5,10 +5,9 @@ from enum import Enum
import logging import logging
import json import json
import subprocess import subprocess
import shutil
from typing import List, Optional from typing import List, Optional
from os.path import join, exists from os.path import join, exists
from os import mkdir from os import mkdir, remove
from pydantic import BaseModel, ConfigDict from pydantic import BaseModel, ConfigDict
from pydantic.alias_generators import to_camel from pydantic.alias_generators import to_camel
@ -409,7 +408,7 @@ class TemplatedService(Service):
def _get_db_dumps_folder(self) -> str: def _get_db_dumps_folder(self) -> str:
# Get the drive where the service is located and append the folder name # Get the drive where the service is located and append the folder name
return join("/", "volumes", self.get_drive(), f"db_dumps_{self.get_id()}") return join("/var/lib/postgresql-dumps", self.get_id())
def get_folders(self) -> List[str]: def get_folders(self) -> List[str]:
folders = self.meta.folders folders = self.meta.folders
@ -481,13 +480,10 @@ class TemplatedService(Service):
def pre_backup(self, job: Job): def pre_backup(self, job: Job):
logger.warning("Pre backup") logger.warning("Pre backup")
if self.get_postgresql_databases(): if self.get_postgresql_databases():
logger.warning("Pre backup: postgresql databases")
# Create the folder for the database dumps
db_dumps_folder = self._get_db_dumps_folder() db_dumps_folder = self._get_db_dumps_folder()
logger.warning(f"Pre backup: db_dumps_folder: {db_dumps_folder}") logger.warning("Pre backup: postgresql databases")
if exists(db_dumps_folder): # Create folder for the dumps if it does not exist
logger.warning("Pre backup: db_dumps_folder exists") if not exists(db_dumps_folder):
shutil.rmtree(db_dumps_folder, ignore_errors=True)
mkdir(db_dumps_folder) mkdir(db_dumps_folder)
# Dump the databases # Dump the databases
for db_name in self.get_postgresql_databases(): for db_name in self.get_postgresql_databases():
@ -503,19 +499,33 @@ class TemplatedService(Service):
logger.warning(f"Pre backup: backup_file: {backup_file}") logger.warning(f"Pre backup: backup_file: {backup_file}")
db_dumper.backup_database(backup_file) db_dumper.backup_database(backup_file)
def _clear_db_dumps(self):
db_dumps_folder = self._get_db_dumps_folder()
for db_name in self.get_postgresql_databases():
backup_file = join(db_dumps_folder, f"{db_name}.sql.gz")
if exists(backup_file):
remove(backup_file)
unpacked_file = backup_file.replace(".gz", "")
if exists(unpacked_file):
remove(unpacked_file)
def post_backup(self, job: Job): def post_backup(self, job: Job):
if self.get_postgresql_databases(): if self.get_postgresql_databases():
# Remove the folder for the database dumps
db_dumps_folder = self._get_db_dumps_folder() db_dumps_folder = self._get_db_dumps_folder()
if exists(db_dumps_folder): # Remove the backup files
shutil.rmtree(db_dumps_folder, ignore_errors=True) for db_name in self.get_postgresql_databases():
backup_file = join(db_dumps_folder, f"{db_name}.sql.gz")
if exists(backup_file):
remove(backup_file)
def pre_restore(self, job: Job): def pre_restore(self, job: Job):
if self.get_postgresql_databases(): if self.get_postgresql_databases():
# Create the folder for the database dumps # Create folder for the dumps if it does not exist
db_dumps_folder = self._get_db_dumps_folder() db_dumps_folder = self._get_db_dumps_folder()
if not exists(db_dumps_folder): if not exists(db_dumps_folder):
mkdir(db_dumps_folder) mkdir(db_dumps_folder)
# Remove existing dumps if they exist
self._clear_db_dumps()
def post_restore(self, job: Job): def post_restore(self, job: Job):
if self.get_postgresql_databases(): if self.get_postgresql_databases():
@ -535,4 +545,5 @@ class TemplatedService(Service):
else: else:
logger.error(f"Database dump for {db_name} not found") logger.error(f"Database dump for {db_name} not found")
raise FileNotFoundError(f"Database dump for {db_name} not found") raise FileNotFoundError(f"Database dump for {db_name} not found")
shutil.rmtree(self._get_db_dumps_folder(), ignore_errors=True) # Remove the dumps
self._clear_db_dumps()