mirror of
https://git.selfprivacy.org/SelfPrivacy/selfprivacy-rest-api.git
synced 2024-11-17 16:09:14 +00:00
fix(huey): adapt to new VM test environment
This commit is contained in:
parent
baf7843349
commit
6f38b2309f
|
@ -27,6 +27,7 @@
|
|||
python-lsp-server
|
||||
pyflakes
|
||||
typer # for strawberry
|
||||
types-redis # for mypy
|
||||
] ++ strawberry-graphql.optional-dependencies.cli));
|
||||
|
||||
vmtest-src-dir = "/root/source";
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
"""
|
||||
Redis pool module for selfprivacy_api
|
||||
"""
|
||||
from os import environ
|
||||
import redis
|
||||
|
||||
from selfprivacy_api.utils.singleton_metaclass import SingletonMetaclass
|
||||
|
||||
REDIS_SOCKET = "/run/redis-sp-api/redis.sock"
|
||||
|
||||
|
||||
class RedisPool(metaclass=SingletonMetaclass):
|
||||
"""
|
||||
|
@ -24,19 +26,7 @@ class RedisPool(metaclass=SingletonMetaclass):
|
|||
redis://[[username]:[password]]@localhost:6379/0
|
||||
unix://[username@]/path/to/socket.sock?db=0[&password=password]
|
||||
"""
|
||||
|
||||
if "USE_REDIS_PORT" in environ:
|
||||
port = int(environ["USE_REDIS_PORT"])
|
||||
return f"redis://@127.0.0.1:{port}/{dbnumber}"
|
||||
else:
|
||||
return f"unix://{RedisPool.redis_socket()}?db={dbnumber}"
|
||||
|
||||
@staticmethod
|
||||
def redis_socket() -> str:
|
||||
if "REDIS_SOCKET" in environ:
|
||||
return environ["REDIS_SOCKET"]
|
||||
else:
|
||||
return "/run/redis-sp-api/redis.sock"
|
||||
return f"unix://{REDIS_SOCKET}?db={dbnumber}"
|
||||
|
||||
def get_connection(self):
|
||||
"""
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
import pytest
|
||||
|
||||
from subprocess import Popen
|
||||
from os import environ, path
|
||||
import redis
|
||||
from typing import List
|
||||
|
||||
import subprocess
|
||||
from subprocess import Popen, check_output, TimeoutExpired
|
||||
from os import environ, path, set_blocking
|
||||
from io import BufferedReader
|
||||
from huey.exceptions import HueyException
|
||||
|
||||
from selfprivacy_api.utils.huey import huey, immediate, HUEY_DATABASE_NUMBER
|
||||
from selfprivacy_api.utils.redis_pool import RedisPool
|
||||
from selfprivacy_api.utils.waitloop import wait_until_true
|
||||
|
||||
from selfprivacy_api.backup.util import output_yielder
|
||||
from selfprivacy_api.utils.redis_pool import RedisPool, REDIS_SOCKET
|
||||
|
||||
|
||||
@huey.task()
|
||||
|
@ -28,66 +29,28 @@ def flush_huey_redis_forcefully():
|
|||
connection.flushdb()
|
||||
|
||||
|
||||
def start_redis_socket(socket_path):
|
||||
# Socket file will be created by redis
|
||||
command = [
|
||||
"redis-server",
|
||||
"--unixsocket",
|
||||
socket_path,
|
||||
"--unixsocketperm",
|
||||
"700",
|
||||
"--port",
|
||||
"0",
|
||||
]
|
||||
redis_handle = Popen(command)
|
||||
# TODO: may be useful in other places too, move to utils/ tests common if using it somewhere
|
||||
def read_all_ready_output(stream: BufferedReader) -> str:
|
||||
set_blocking(stream.fileno(), False)
|
||||
output: List[bytes] = []
|
||||
while True:
|
||||
line = stream.readline()
|
||||
raise ValueError(line)
|
||||
if line == b"":
|
||||
break
|
||||
else:
|
||||
output.append(line)
|
||||
|
||||
wait_until_true(lambda: path.exists(socket_path), timeout_sec=2)
|
||||
flush_huey_redis_forcefully()
|
||||
set_blocking(stream.fileno(), True)
|
||||
|
||||
return redis_handle
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def redis_socket(tmpdir):
|
||||
# Does NOT overwrite already imported redis pools
|
||||
# -> Not very useful for more involved tests
|
||||
# DOES override imported huey partially, but tries to restore it back
|
||||
|
||||
socket_path = path.join(tmpdir, "redis.sock")
|
||||
environ["REDIS_SOCKET"] = socket_path
|
||||
|
||||
old_port = None
|
||||
if "USE_REDIS_PORT" in environ:
|
||||
old_port = environ["USE_REDIS_PORT"]
|
||||
del environ["USE_REDIS_PORT"]
|
||||
|
||||
assert "USE_REDIS_PORT" not in environ
|
||||
|
||||
old_huey_url = huey.storage_kwargs.get("url")
|
||||
# Overriding url in the already imported singleton
|
||||
huey.storage_kwargs["url"] = RedisPool.connection_url(HUEY_DATABASE_NUMBER)
|
||||
reset_huey_storage()
|
||||
|
||||
redis_handle = start_redis_socket(socket_path)
|
||||
|
||||
yield socket_path
|
||||
|
||||
# Socket file will be destroyed by redis
|
||||
redis_handle.terminate()
|
||||
|
||||
if old_port:
|
||||
environ["USE_REDIS_PORT"] = old_port
|
||||
del environ["REDIS_SOCKET"]
|
||||
|
||||
if old_huey_url:
|
||||
huey.storage_kwargs["url"] = old_huey_url
|
||||
else:
|
||||
del huey.storage_kwargs["url"]
|
||||
reset_huey_storage()
|
||||
result = b"".join(output)
|
||||
return result.decode("utf-8")
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def not_immediate():
|
||||
assert environ["TEST_MODE"] == "true"
|
||||
|
||||
old_immediate = huey.immediate
|
||||
environ["HUEY_QUEUES_FOR_TESTS"] = "Yes"
|
||||
huey.immediate = False
|
||||
|
@ -101,48 +64,32 @@ def not_immediate():
|
|||
|
||||
|
||||
@pytest.fixture()
|
||||
def huey_queues(not_immediate):
|
||||
"""
|
||||
Full, not-immediate, queued huey, with consumer starting and stopping.
|
||||
IMPORTANT: Assumes tests are run from the project directory.
|
||||
The above is needed by consumer to find our huey setup.
|
||||
"""
|
||||
flush_huey_redis_forcefully()
|
||||
command = ["huey_consumer.py", "selfprivacy_api.task_registry.huey"]
|
||||
consumer_handle = Popen(command)
|
||||
|
||||
yield huey
|
||||
|
||||
consumer_handle.kill()
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def huey_queues_socket(not_immediate, redis_socket):
|
||||
def huey_socket_consumer(not_immediate):
|
||||
"""
|
||||
Same as above, but with socketed redis
|
||||
"""
|
||||
|
||||
flush_huey_redis_forcefully()
|
||||
command = ["huey_consumer.py", "selfprivacy_api.task_registry.huey"]
|
||||
consumer_handle = Popen(command)
|
||||
|
||||
assert path.exists(redis_socket)
|
||||
# First assert that consumer does not fail by itself
|
||||
# Idk yet how to do it more elegantly
|
||||
try:
|
||||
check_output(command, timeout=2)
|
||||
except TimeoutExpired:
|
||||
pass
|
||||
|
||||
yield redis_socket
|
||||
# Then open it for real
|
||||
consumer_handle = Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
|
||||
assert path.exists(REDIS_SOCKET)
|
||||
|
||||
yield consumer_handle
|
||||
|
||||
consumer_handle.kill()
|
||||
|
||||
|
||||
def test_huey_over_redis(huey_queues):
|
||||
assert huey.immediate is False
|
||||
assert immediate() is False
|
||||
|
||||
result = sum(2, 5)
|
||||
assert result(blocking=True, timeout=2) == 7
|
||||
|
||||
|
||||
# we cannot have these two fixtures prepared at the same time to iterate through them
|
||||
def test_huey_over_redis_socket(huey_queues_socket):
|
||||
def test_huey_over_redis_socket(huey_socket_consumer):
|
||||
assert huey.immediate is False
|
||||
assert immediate() is False
|
||||
|
||||
|
@ -158,14 +105,28 @@ def test_huey_over_redis_socket(huey_queues_socket):
|
|||
)
|
||||
|
||||
result = sum(2, 5)
|
||||
assert result(blocking=True, timeout=2) == 7
|
||||
try:
|
||||
assert result(blocking=True, timeout=10) == 7
|
||||
|
||||
except HueyException as error:
|
||||
if "timed out" in str(error):
|
||||
output = read_all_ready_output(huey_socket_consumer.stdout)
|
||||
errorstream = read_all_ready_output(huey_socket_consumer.stderr)
|
||||
raise TimeoutError(
|
||||
f"Huey timed out: {str(error)}",
|
||||
f"Consumer output: {output}",
|
||||
f"Consumer errorstream: {errorstream}",
|
||||
)
|
||||
else:
|
||||
raise error
|
||||
|
||||
|
||||
@pytest.mark.xfail(reason="cannot yet schedule with sockets for some reason")
|
||||
def test_huey_schedule(huey_queues_socket):
|
||||
# we do not schedule tasks anywhere, but concerning.
|
||||
result = sum.schedule((2, 5), delay=10)
|
||||
# We do not schedule tasks anywhere, but concerning that it fails.
|
||||
sum.schedule((2, 5), delay=10)
|
||||
|
||||
try:
|
||||
assert len(huey.scheduled()) == 1
|
||||
except assertionerror:
|
||||
raise valueerror("have wrong amount of scheduled tasks", huey.scheduled())
|
||||
except AssertionError:
|
||||
raise ValueError("have wrong amount of scheduled tasks", huey.scheduled())
|
||||
|
|
Loading…
Reference in a new issue