ci/bin: Make ci_run_n_monitor find dependencies automatically

Use the GraphQL util to find dependencies accurately, saving time for
both developers and runners.

Signed-off-by: Guilherme Gallo <guilherme.gallo@collabora.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/17791>
Guilherme Gallo
2022-07-15 18:17:02 -03:00
committed by Marge Bot
parent caf9900b3d
commit 39cdcea251
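
The diff below drops the hardcoded DEPENDENCIES list and instead queries the pipeline's job-needs DAG over GraphQL, filters it down to the target job, and flattens the needed jobs into a set. A minimal sketch of that idea follows; it is not the script's actual code, the exact matching behaviour of gitlab_gql.filter_dag and the shape of the DAG returned by create_job_needs_dag are assumptions, and filter_dag_sketch is a hypothetical stand-in:

# Illustrative sketch only: filter_dag_sketch is a hypothetical stand-in for
# gitlab_gql.filter_dag, and the DAG is assumed to map each job name to the
# full set of jobs it needs (job names taken from the old DEPENDENCIES list).
import re
from itertools import chain

dag = {
    "debian-testing": {"debian/x86_build", "kernel+rootfs_amd64"},
    "debian-arm64": {"debian/arm_build", "kernel+rootfs_arm64"},
}

def filter_dag_sketch(dag: dict[str, set[str]], target_regex: str) -> dict[str, set[str]]:
    # Keep only the jobs whose name matches the target regex.
    pattern = re.compile(target_regex)
    return {job: needs for job, needs in dag.items() if pattern.fullmatch(job)}

target_dep_dag = filter_dag_sketch(dag, "debian-testing")
deps = set(chain.from_iterable(target_dep_dag.values()))
print(deps)  # {'debian/x86_build', 'kernel+rootfs_amd64'}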


@@ -19,10 +19,12 @@ import sys
import time
from concurrent.futures import ThreadPoolExecutor
from functools import partial
from itertools import chain
from typing import Optional
import gitlab
from colorama import Fore, Style
from gitlab_gql import GitlabGQL, create_job_needs_dag, filter_dag
REFRESH_WAIT_LOG = 10
REFRESH_WAIT_JOBS = 6
@@ -41,22 +43,6 @@ STATUS_COLORS = {
"skipped": "",
}
# TODO: This hardcoded list should be replaced by querying the pipeline's
# dependency graph to see which jobs the target jobs need
DEPENDENCIES = [
"debian/x86_build-base",
"debian/x86_build",
"debian/x86_test-base",
"debian/x86_test-gl",
"debian/arm_build",
"debian/arm_test",
"kernel+rootfs_amd64",
"kernel+rootfs_arm64",
"kernel+rootfs_armhf",
"debian-testing",
"debian-arm64",
]
COMPLETED_STATUSES = ["success", "failed"]
@@ -125,10 +111,6 @@ def monitor_pipeline(
statuses = {}
target_statuses = {}
if not dependencies:
dependencies = []
dependencies.extend(DEPENDENCIES)
if target_job:
target_jobs_regex = re.compile(target_job.strip())
@@ -242,7 +224,6 @@ def parse_args() -> None:
+ '--target ".*traces" ',
)
parser.add_argument("--target", metavar="target-job", help="Target job")
parser.add_argument("--deps", nargs="+", help="Job dependencies")
parser.add_argument(
"--rev", metavar="revision", help="repository git revision", required=True
)
@@ -268,6 +249,16 @@ def read_token(token_arg: Optional[str]) -> str:
)
def find_dependencies(target_job: str, project_path: str, sha: str) -> set[str]:
gql_instance = GitlabGQL()
dag, _ = create_job_needs_dag(
gql_instance, {"projectPath": project_path.path_with_namespace, "sha": sha}
)
target_dep_dag = filter_dag(dag, target_job)
deps = set(chain.from_iterable(target_dep_dag.values()))
return deps
if __name__ == "__main__":
try:
t_start = time.perf_counter()
@@ -283,11 +274,14 @@ if __name__ == "__main__":
print(f"Revision: {args.rev}")
pipe = wait_for_pipeline(cur_project, args.rev)
print(f"Pipeline: {pipe.web_url}")
deps = set()
if args.target:
print("🞋 job: " + Fore.BLUE + args.target + Style.RESET_ALL)
print(f"Extra dependencies: {args.deps}")
deps = find_dependencies(
target_job=args.target, sha=args.rev, project_path=cur_project
)
target_job_id, ret = monitor_pipeline(
cur_project, pipe, args.target, args.deps, args.force_manual
cur_project, pipe, args.target, deps, args.force_manual
)
if target_job_id:
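
For a quick standalone check of the new helper, something along these lines should work. This is a hypothetical snippet, not part of the change: the module name in the import, the anonymous python-gitlab access, the target job, and the placeholder SHA are all assumptions; only the find_dependencies() signature and the use of .path_with_namespace come from the diff above.

# Hypothetical sanity check of the new dependency resolution (not in the commit).
import gitlab

from ci_run_n_monitor import find_dependencies  # module name assumed from the script name

gl = gitlab.Gitlab("https://gitlab.freedesktop.org")  # anonymous read access assumed to suffice
cur_project = gl.projects.get("mesa/mesa")            # python-gitlab Project object
deps = find_dependencies(
    target_job="debian-testing",
    sha="0123456789abcdef0123456789abcdef01234567",   # placeholder: a SHA that has a pipeline
    project_path=cur_project,                         # only .path_with_namespace is used
)
print(sorted(deps))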