
LavaFarm is a class created to handle the different types of LAVA farms and their tags in Mesa CI. Since specific jobs may require different types of LAVA farms to run on, it is essential to determine which farm the runner is running on to configure the job correctly. LavaFarm provides an easy-to-use interface for checking the runner tag and returning the corresponding LAVA farm, making it simple for Mesa CI to configure jobs appropriately. By adding tests for LavaFarm, the team can ensure that this class is functioning as expected, allowing for the smooth execution of Mesa CI jobs on the correct LAVA farm. The tests ensure that get_lava_farm returns the correct LavaFarm value when given invalid or valid tags and that it returns LavaFarm.UNKNOWN when no tag is provided. The tests use Hypothesis strategies to generate various labels and farms for testing. Example of use: ``` from lava.utils.lava_farm import LavaFarm, get_lava_farm lava_farm = get_lava_farm() if lava_farm == LavaFarm.DUMMY: # Configure the job for the DUMMY farm ... elif lava_farm == LavaFarm.COLLABORA: # Configure the job for the COLLABORA farm ... elif lava_farm == LavaFarm.KERNELCI: # Configure the job for the KERNELCI farm ... else: # Handle the case where the LAVA farm is unknown ... ``` Signed-off-by: Guilherme Gallo <guilherme.gallo@collabora.com> Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/21325>
60 lines
1.6 KiB
Python
from unittest.mock import MagicMock, patch
|
|
|
|
import pytest
|
|
import yaml
|
|
from freezegun import freeze_time
|
|
from hypothesis import settings
|
|
|
|
from .lava.helpers import generate_testsuite_result, jobs_logs_response
|
|
|
|
# Hypothesis CI profile: run many examples per property, and derandomize so
# that any failure reproduces deterministically across CI runs.
settings.register_profile("ci", max_examples=1000, derandomize=True)
settings.load_profile("ci")
|
|
|
|
def pytest_configure(config):
    """Register the custom ``slow`` marker so pytest does not warn on it."""
    slow_marker = (
        "markers",
        "slow: marks tests as slow (deselect with '-m \"not slow\"')",
    )
    config.addinivalue_line(*slow_marker)
|
|
|
|
@pytest.fixture
def mock_sleep():
    """Neutralize ``time.sleep`` so tests that wait complete instantly."""
    sleep_patcher = patch("time.sleep", return_value=None)
    sleep_patcher.start()
    try:
        yield
    finally:
        # Always restore the real time.sleep, even if the test raised.
        sleep_patcher.stop()
|
|
|
|
|
|
@pytest.fixture
def frozen_time(mock_sleep):
    """Freeze the wall clock for the test; depends on mock_sleep so that
    ``time.sleep`` is also a no-op while time is frozen."""
    with freeze_time() as freezer:
        yield freezer
|
|
|
|
|
|
# Default canned payload for the proxy's results.get_testjob_results_yaml
# mock (see mock_proxy below); YAML-dumped before being returned.
RESULT_GET_TESTJOB_RESULTS = [{"metadata": {"result": "test"}}]
|
|
|
|
|
|
@pytest.fixture
def mock_proxy():
    """Factory fixture building a MagicMock that stands in for the LAVA
    XML-RPC proxy used by the code under test.

    The yielded callable accepts:
        job_results: payload served (YAML-dumped) by
            ``results.get_testjob_results_yaml``.
        testsuite_results: payload served (YAML-dumped) by
            ``results.get_testsuite_results_yaml``; defaults to a fresh
            ``[generate_testsuite_result()]`` per call.
        **kwargs: extra attributes set on the ``scheduler.jobs.logs`` mock
            (e.g. ``side_effect=...``) so tests can customize log polling.
    """

    def create_proxy_mock(
        job_results=RESULT_GET_TESTJOB_RESULTS,
        testsuite_results=None,
        **kwargs
    ):
        # Fix for the mutable-default-argument pitfall: build a fresh
        # testsuite result per call instead of sharing one list (and one
        # generate_testsuite_result() evaluation) across calls.
        if testsuite_results is None:
            testsuite_results = [generate_testsuite_result()]

        proxy_mock = MagicMock()

        # Job submission always "succeeds" with a fixed job id.
        proxy_submit_mock = proxy_mock.scheduler.jobs.submit
        proxy_submit_mock.return_value = "1234"

        proxy_results_mock = proxy_mock.results.get_testjob_results_yaml
        proxy_results_mock.return_value = yaml.safe_dump(job_results)

        proxy_test_suites_mock = proxy_mock.results.get_testsuite_results_yaml
        proxy_test_suites_mock.return_value = yaml.safe_dump(testsuite_results)

        proxy_logs_mock = proxy_mock.scheduler.jobs.logs
        proxy_logs_mock.return_value = jobs_logs_response()

        # Let callers override/extend the logs mock, e.g. side_effect to
        # simulate log-stream errors or multi-step responses.
        for key, value in kwargs.items():
            setattr(proxy_logs_mock, key, value)

        return proxy_mock

    yield create_proxy_mock
|