mirror of https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api.git
test: fix nix collect garbage, add tests
parent 510b94039e
commit 2340a0f8e9
@@ -1,105 +1,128 @@
 import re
 import subprocess

-from selfprivacy_api.jobs import Job, JobStatus, Jobs
 from selfprivacy_api.utils.huey import huey
+from selfprivacy_api.jobs import JobStatus, Jobs


+COMPLETED_WITH_ERROR = "Completed with an error"
+RESULT_WAAS_NOT_FOUND_ERROR = "We are sorry, result was not found :("
+CLEAR_COMPLETED = "Сleaning completed."
+
+
 def run_nix_store_print_dead():
-    return subprocess.check_output(["nix-store", "--gc", "--print-dead"])
+    return subprocess.check_output(["nix-store", "--gc", "--print-dead"]).decode(
+        "utf-8"
+    )


 def run_nix_collect_garbage():
     return subprocess.Popen(
         ["nix-collect-garbage", "-d"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
     ).stdout


+def set_job_status_wrapper(Jobs, job):
+    def set_job_status(status, progress, status_text, result="Default result"):
+        Jobs.update(
+            job=job,
+            status=status,
+            progress=progress,
+            status_text=status_text,
+            result=result,
+        )
+
+    return set_job_status
+
+
-def parse_line(line, job: Job):
-    pattern = re.compile(r"[+-]?\d+\.\d+ \w+ freed")
+def parse_line(line):
+    pattern = re.compile(r"[+-]?\d+\.\d+ \w+(?= freed)")
     match = re.search(
         pattern,
         line,
     )

     if match is None:
-        Jobs.update(
-            job=job,
-            status=JobStatus.FINISHED,
-            progress=100,
-            status_text="Completed with an error",
-            result="We are sorry, result was not found :(",
+        return (
+            JobStatus.FINISHED,
+            100,
+            COMPLETED_WITH_ERROR,
+            RESULT_WAAS_NOT_FOUND_ERROR,
         )

     else:
-        Jobs.update(
-            job=job,
-            status=JobStatus.FINISHED,
-            progress=100,
-            status_text="Сleaning completed.",
-            result=f"{match.group(0)} have been cleared",
+        return (
+            JobStatus.FINISHED,
+            100,
+            CLEAR_COMPLETED,
+            f"{match.group(0)} have been cleared",
         )


 def stream_process(
-    process,
+    stream,
     package_equal_to_percent,
-    job: Job,
+    set_job_status,
 ):
-    go = process.poll() is None
     percent = 0

-    for line in process.stdout:
+    for line in stream:
         if "deleting '/nix/store/" in line:
             percent += package_equal_to_percent

-            Jobs.update(
-                job=job,
+            set_job_status(
                 status=JobStatus.RUNNING,
                 progress=int(percent),
                 status_text="Сleaning...",
             )

         elif "store paths deleted," in line:
-            parse_line(line, job)
-
-    return go
+            status = parse_line(line)
+            set_job_status(
+                status=status[0],
+                progress=status[1],
+                status_text=status[2],
+                result=status[3],
+            )


+def get_dead_packages(output):
+    dead = len(re.findall("/nix/store/", output))
+    percent = None
+    if dead != 0:
+        percent = 100 / dead
+    return dead, percent
+
+
 @huey.task()
 def nix_collect_garbage(
-    job: Job,
+    job,
     jobs=Jobs,
     run_nix_store=run_nix_store_print_dead,
     run_nix_collect=run_nix_collect_garbage,
     set_job_status=None,
 ): # innocent as a pure function
+    set_job_status = set_job_status or set_job_status_wrapper(jobs, job)
+
-    Jobs.update(
-        job=job,
+    set_job_status(
         status=JobStatus.RUNNING,
         progress=0,
         status_text="Сalculate the number of dead packages...",
     )

-    output = run_nix_store()
-
-    dead_packages = len(re.findall("/nix/store/", output.decode("utf-8")))
+    dead_packages, package_equal_to_percent = get_dead_packages(run_nix_store())

     if dead_packages == 0:
-        Jobs.update(
-            job=job,
+        set_job_status(
             status=JobStatus.FINISHED,
             progress=100,
             status_text="Nothing to clear",
             result="System is clear",
         )
         return

-    package_equal_to_percent = 100 / dead_packages
-
-    Jobs.update(
-        job=job,
+    set_job_status(
         status=JobStatus.RUNNING,
         progress=0,
         status_text=f"Found {dead_packages} packages to remove!",
     )

-    stream_process(run_nix_collect, package_equal_to_percent, job)
+    stream_process(run_nix_collect(), package_equal_to_percent, set_job_status)
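The regex change in parse_line is the subtle part of the fix: the old pattern matched "425.51 MiB freed" including the trailing word, while the new lookahead stops the match before " freed", so only the freed size lands in the job result. A quick standalone check of that behaviour (not part of the commit; the sample line comes from the nix-collect-garbage fixture used in the tests below):

import re

# Pattern as defined in parse_line() after this commit.
pattern = re.compile(r"[+-]?\d+\.\d+ \w+(?= freed)")

line = "190 store paths deleted, 425.51 MiB freed"
match = re.search(pattern, line)

# The lookahead asserts that " freed" follows but keeps it out of the match.
assert match is not None
assert match.group(0) == "425.51 MiB"
print(f"{match.group(0)} have been cleared")  # -> 425.51 MiB have been cleared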
@@ -3,30 +3,146 @@
 # pylint: disable=missing-function-docstring

 import pytest
 from selfprivacy_api.jobs import JobStatus

-from selfprivacy_api.jobs.nix_collect_garbage import nix_collect_garbage
+from selfprivacy_api.jobs.nix_collect_garbage import (
+    get_dead_packages,
+    nix_collect_garbage,
+    parse_line,
+    CLEAR_COMPLETED,
+    COMPLETED_WITH_ERROR,
+    stream_process,
+    RESULT_WAAS_NOT_FOUND_ERROR,
+)


+output_print_dead = """
+finding garbage collector roots...
+determining live/dead paths...
+/nix/store/02k8pmw00p7p7mf2dg3n057771w7liia-python3.10-cchardet-2.1.7
+/nix/store/03vc6dznx8njbvyd3gfhfa4n5j4lvhbl-python3.10-async-timeout-4.0.2
+/nix/store/03ybv2dvfk7c3cpb527y5kzf6i35ch41-python3.10-pycparser-2.21
+/nix/store/04dn9slfqwhqisn1j3jv531lms9w5wlj-python3.10-hypothesis-6.50.1.drv
+/nix/store/04hhx2z1iyi3b48hxykiw1g03lp46jk7-python-remove-bin-bytecode-hook
+"""
+
-    created_at: datetime.datetime
-    updated_at: datetime.datetime
-    uid: UUID
-    type_id: str
-    name: str
-    description: str
-    status: JobStatus
+output_collect_garbage = """
+removing old generations of profile /nix/var/nix/profiles/per-user/def/channels
+finding garbage collector roots...
+deleting garbage...
+deleting '/nix/store/02k8pmw00p7p7mf2dg3n057771w7liia-python3.10-cchardet-2.1.7'
+deleting '/nix/store/03vc6dznx8njbvyd3gfhfa4n5j4lvhbl-python3.10-async-timeout-4.0.2'
+deleting '/nix/store/03ybv2dvfk7c3cpb527y5kzf6i35ch41-python3.10-pycparser-2.21'
+deleting '/nix/store/04dn9slfqwhqisn1j3jv531lms9w5wlj-python3.10-hypothesis-6.50.1.drv'
+deleting '/nix/store/04hhx2z1iyi3b48hxykiw1g03lp46jk7-python-remove-bin-bytecode-hook'
+deleting unused links...
+note: currently hard linking saves -0.00 MiB
+190 store paths deleted, 425.51 MiB freed
+"""
+
+
-def test_nix_collect_garbage(job(
-    created_at = "2019-12-04",
-    updated_at = "2019-12-04",
-    uid = UUID,
-    type_id = "typeid",
-    name = "name",
-    description: "desc",
-    status = status(CREATED = "CREATED"),
-)):
+def test_parse_line():
+    txt = "190 store paths deleted, 425.51 MiB freed"
+    output = (
+        JobStatus.FINISHED,
+        100,
+        CLEAR_COMPLETED,
+        "425.51 MiB have been cleared",
+    )
+    assert parse_line(txt) == output

-    assert nix_collect_garbage() is not None

+def test_parse_line_with_blank_line():
+    txt = ""
+    output = (
+        JobStatus.FINISHED,
+        100,
+        COMPLETED_WITH_ERROR,
+        RESULT_WAAS_NOT_FOUND_ERROR,
+    )
+    assert parse_line(txt) == output
+
+
+def test_get_dead_packages():
+    assert get_dead_packages(output_print_dead) == (5, 20.0)
+
+
+def test_get_dead_packages_zero():
+    assert get_dead_packages("") == (0, None)
+
+
+def test_stream_process():
+    log_event = []
+    reference = [
+        (JobStatus.RUNNING, 20, "Сleaning...", ""),
+        (JobStatus.RUNNING, 40, "Сleaning...", ""),
+        (JobStatus.RUNNING, 60, "Сleaning...", ""),
+        (JobStatus.RUNNING, 80, "Сleaning...", ""),
+        (JobStatus.RUNNING, 100, "Сleaning...", ""),
+        (
+            JobStatus.FINISHED,
+            100,
+            "Сleaning completed.",
+            "425.51 MiB have been cleared",
+        ),
+    ]
+
+    def set_job_status(status, progress, status_text, result=""):
+        log_event.append((status, progress, status_text, result))
+
+    stream_process(output_collect_garbage.split("\n"), 20.0, set_job_status)
+    assert log_event == reference
+
+
+def test_nix_collect_garbage():
+    log_event = []
+    reference = [
+        (JobStatus.RUNNING, 0, "Сalculate the number of dead packages...", ""),
+        (JobStatus.RUNNING, 0, "Found 5 packages to remove!", ""),
+        (JobStatus.RUNNING, 20, "Сleaning...", ""),
+        (JobStatus.RUNNING, 40, "Сleaning...", ""),
+        (JobStatus.RUNNING, 60, "Сleaning...", ""),
+        (JobStatus.RUNNING, 80, "Сleaning...", ""),
+        (JobStatus.RUNNING, 100, "Сleaning...", ""),
+        (
+            JobStatus.FINISHED,
+            100,
+            "Сleaning completed.",
+            "425.51 MiB have been cleared",
+        ),
+    ]
+
+    def set_job_status(status="", progress="", status_text="", result=""):
+        log_event.append((status, progress, status_text, result))
+
+    nix_collect_garbage(
+        None,
+        None,
+        lambda: output_print_dead,
+        lambda: output_collect_garbage.split("\n"),
+        set_job_status,
+    )
+
+    assert log_event == reference
+
+
+def test_nix_collect_garbage_zero_trash():
+    log_event = []
+    reference = [
+        (JobStatus.RUNNING, 0, "Сalculate the number of dead packages...", ""),
+        (JobStatus.FINISHED, 100, "Nothing to clear", "System is clear"),
+    ]
+
+    def set_job_status(status="", progress="", status_text="", result=""):
+        log_event.append((status, progress, status_text, result))
+
+    nix_collect_garbage(
+        None,
+        None,
+        lambda: "",
+        lambda: output_collect_garbage.split("\n"),
+        set_job_status,
+    )
+
+    assert log_event == reference
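The tests above sidestep the real Jobs backend by injecting their own set_job_status along with stub callables for the nix commands. In production the task is expected to fall back to set_job_status_wrapper, which closes over the job and forwards each (status, progress, status_text, result) report to Jobs.update. A minimal sketch of that closure in isolation, assuming the selfprivacy_api package is importable; FakeJobs is a hypothetical stand-in for the Jobs backend and is not part of the commit:

from selfprivacy_api.jobs import JobStatus
from selfprivacy_api.jobs.nix_collect_garbage import set_job_status_wrapper


class FakeJobs:
    """Hypothetical stand-in that records Jobs.update() calls."""

    def __init__(self):
        self.calls = []

    def update(self, job, status, progress, status_text, result):
        self.calls.append((job, status, progress, status_text, result))


fake_jobs = FakeJobs()
report = set_job_status_wrapper(fake_jobs, job="job-handle")

# The closure forwards everything to Jobs.update(), binding the job it was
# created with and substituting "Default result" when no result is passed.
report(status=JobStatus.RUNNING, progress=50, status_text="Collecting garbage...")

assert fake_jobs.calls == [
    ("job-handle", JobStatus.RUNNING, 50, "Collecting garbage...", "Default result")
]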