#!/usr/bin/env python3
# Copyright © 2020 - 2022 Collabora Ltd.
# Authors:
# Tomeu Vizoso <tomeu.vizoso@collabora.com>
# David Heidelberg <david.heidelberg@collabora.com>
#
# For the dependencies, see the requirements.txt
# SPDX-License-Identifier: MIT

"""
|
|
|
|
Helper script to restrict running only required CI jobs
|
|
|
|
and show the job(s) logs.
|
|
|
|
"""

import argparse
import re
import sys
import time
from collections import defaultdict
from concurrent.futures import ThreadPoolExecutor
from functools import partial
from itertools import chain
from subprocess import check_output
from typing import TYPE_CHECKING, Iterable, Literal, Optional

import gitlab
import gitlab.v4.objects
from colorama import Fore, Style
from gitlab_common import (
    GITLAB_URL,
    TOKEN_DIR,
    get_gitlab_pipeline_from_url,
    get_gitlab_project,
    get_token_from_default_dir,
    pretty_duration,
    read_token,
    wait_for_pipeline,
)
from gitlab_gql import GitlabGQL, create_job_needs_dag, filter_dag, print_dag
if TYPE_CHECKING:
    from gitlab_gql import Dag


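# Polling intervals, in seconds, used with pretty_wait() below.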
REFRESH_WAIT_LOG = 10
REFRESH_WAIT_JOBS = 6
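# OSC 8 terminal escape sequences; print_job_status() wraps a job's name in
# these to render it as a clickable hyperlink in supporting terminals.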
URL_START = "\033]8;;"
URL_END = "\033]8;;\a"

STATUS_COLORS = {
    "created": "",
    "running": Fore.BLUE,
    "success": Fore.GREEN,
    "failed": Fore.RED,
    "canceled": Fore.MAGENTA,
    "manual": "",
    "pending": "",
    "skipped": "",
}
COMPLETED_STATUSES = ["success", "failed"]


def print_job_status(job, new_status=False) -> None:
    """Print a nice, colored job status with a link to the job."""
    if job.status == "canceled":
        return

    if new_status and job.status == "created":
        return

    if job.duration:
        duration = job.duration
    elif job.started_at:
        # Wall-clock time is required here; time.perf_counter() has an
        # arbitrary epoch and cannot be compared with time.mktime() results.
        duration = time.time() - time.mktime(job.started_at.timetuple())

    print(
        STATUS_COLORS[job.status]
        + "🞋 job "
        + URL_START
        + f"{job.web_url}\a{job.name}"
        + URL_END
        + (f" has new status: {job.status}" if new_status else f" :: {job.status}")
        + (f" ({pretty_duration(duration)})" if job.started_at else "")
        + Style.RESET_ALL
    )


def pretty_wait(sec: int) -> None:
    """Show a per-second countdown while waiting."""
    for val in range(sec, 0, -1):
        print(f"⏲ {val} seconds", end="\r")
        time.sleep(1)


def monitor_pipeline(
    project,
    pipeline,
    target_jobs_regex: re.Pattern,
    dependencies,
    force_manual: bool,
    stress: int,
) -> tuple[Optional[int], Optional[int]]:
    """Monitor the pipeline and delegate job cancellation."""
    statuses: dict[str, str] = defaultdict(str)
    target_statuses: dict[str, str] = defaultdict(str)
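    # Per-job tally of finished runs for --stress: job name -> status -> count.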
    stress_status_counter = defaultdict(lambda: defaultdict(int))
    target_id = None

    while True:
        deps_failed = []
        to_cancel = []
        for job in pipeline.jobs.list(all=True, sort="desc"):
            # target jobs
            if target_jobs_regex.fullmatch(job.name):
                target_id = job.id

                if stress and job.status in ["success", "failed"]:
                    if (
                        stress < 0
                        or sum(stress_status_counter[job.name].values()) < stress
                    ):
                        job = enable_job(project, pipeline, job, "retry", force_manual)
                    stress_status_counter[job.name][job.status] += 1
                else:
                    job = enable_job(project, pipeline, job, "target", force_manual)

                print_job_status(job, job.status not in target_statuses[job.name])
                target_statuses[job.name] = job.status
                continue

            # all jobs
            if job.status != statuses[job.name]:
                print_job_status(job, True)
            statuses[job.name] = job.status

            # run dependencies and cancel the rest
            if job.name in dependencies:
                job = enable_job(project, pipeline, job, "dep", True)
                if job.status == "failed":
                    deps_failed.append(job.name)
            else:
                to_cancel.append(job)

        cancel_jobs(project, to_cancel)

        if stress:
            enough = True
            for job_name, status in stress_status_counter.items():
                print(
                    f"{job_name}\tsucc: {status['success']}; "
                    f"fail: {status['failed']}; "
                    f"total: {sum(status.values())} of {stress}",
                    flush=False,
                )
                if stress < 0 or sum(status.values()) < stress:
                    enough = False

            if not enough:
                pretty_wait(REFRESH_WAIT_JOBS)
                continue

        print("---------------------------------", flush=False)
        if len(target_statuses) == 1 and {"running"}.intersection(
            target_statuses.values()
        ):
            return target_id, None

        if (
            {"failed"}.intersection(target_statuses.values())
            and not {"running", "pending"}.intersection(target_statuses.values())
        ):
            return None, 1

        if (
            {"skipped"}.intersection(target_statuses.values())
            and not {"running", "pending"}.intersection(target_statuses.values())
        ):
            print(
                Fore.RED,
                "Target in skipped state, aborting. Failed dependencies:",
                deps_failed,
                Fore.RESET,
            )
            return None, 1

        if {"success", "manual"}.issuperset(target_statuses.values()):
            return None, 0

        pretty_wait(REFRESH_WAIT_JOBS)


def get_pipeline_job(
    pipeline: gitlab.v4.objects.ProjectPipeline,
    job_id: int,
) -> gitlab.v4.objects.ProjectPipelineJob:
    pipeline_jobs = pipeline.jobs.list(all=True)
    return [j for j in pipeline_jobs if j.id == job_id][0]


def enable_job(
    project: gitlab.v4.objects.Project,
    pipeline: gitlab.v4.objects.ProjectPipeline,
    job: gitlab.v4.objects.ProjectPipelineJob,
    action_type: Literal["target", "dep", "retry"],
    force_manual: bool,
) -> gitlab.v4.objects.ProjectPipelineJob:
    """Enable a job: play it if manual, retry it if it already finished."""
    if (
        (job.status in ["success", "failed"] and action_type != "retry")
        or (job.status == "manual" and not force_manual)
        or job.status in ["skipped", "running", "created", "pending"]
    ):
        return job

    pjob = project.jobs.get(job.id, lazy=True)

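    # A finished (or canceled) job can only run again through retry(), which
    # creates a job with a new id; a manual job is started in place via play().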
    if job.status in ["success", "failed", "canceled"]:
        new_job = pjob.retry()
        job = get_pipeline_job(pipeline, new_job["id"])
    else:
        pjob.play()
        job = get_pipeline_job(pipeline, pjob.id)

    if action_type == "target":
        jtype = "🞋 "
    elif action_type == "retry":
        jtype = "↻"
    else:
        jtype = "(dependency)"

    print(Fore.MAGENTA + f"{jtype} job {job.name} manually enabled" + Style.RESET_ALL)

    return job


def cancel_job(project, job) -> None:
    """Cancel GitLab job"""
    if job.status in [
        "canceled",
        "success",
        "failed",
        "skipped",
    ]:
        return
    pjob = project.jobs.get(job.id, lazy=True)
    pjob.cancel()
    print(f"♲ {job.name}", end=" ")


def cancel_jobs(project, to_cancel) -> None:
    """Cancel unwanted GitLab jobs"""
    if not to_cancel:
        return

    with ThreadPoolExecutor(max_workers=6) as exe:
        part = partial(cancel_job, project)
        exe.map(part, to_cancel)
    print()


def print_log(project, job_id) -> None:
    """Print job log into output"""
    printed_lines = 0
    while True:
        job = project.jobs.get(job_id)

        # GitLab's REST API doesn't offer pagination for logs, so we have to refetch it all
        lines = job.trace().decode("raw_unicode_escape").splitlines()
        for line in lines[printed_lines:]:
            print(line)
        printed_lines = len(lines)

        if job.status in COMPLETED_STATUSES:
            print(Fore.GREEN + f"Job finished: {job.web_url}" + Style.RESET_ALL)
            return
        pretty_wait(REFRESH_WAIT_LOG)


def parse_args() -> argparse.Namespace:
    """Parse args"""
    parser = argparse.ArgumentParser(
        description="Tool to trigger a subset of container jobs "
        + "and monitor the progress of a test job",
        epilog="Example: mesa-monitor.py --rev $(git rev-parse HEAD) "
        + '--target ".*traces" ',
    )
    parser.add_argument(
        "--target",
        metavar="target-job",
        help="Target job regex. For multiple targets, separate with pipe | character",
        required=True,
        nargs=argparse.ONE_OR_MORE,
    )
    parser.add_argument(
        "--token",
        metavar="token",
        type=str,
        default=get_token_from_default_dir(),
        help="Use the provided GitLab token or token file, "
        f"otherwise it's read from {TOKEN_DIR / 'gitlab-token'}",
    )
    parser.add_argument(
        "--force-manual", action="store_true", help="Force jobs marked as manual"
    )
    parser.add_argument(
        "--stress",
        default=0,
        type=int,
        help="Stress-test job(s). Number of repetitions, or -1 for infinite.",
    )
    parser.add_argument(
        "--project",
        default="mesa",
        help="GitLab project in the format <user>/<project> or just <project>",
    )

    mutex_group1 = parser.add_mutually_exclusive_group()
    mutex_group1.add_argument(
        "--rev", default="HEAD", metavar="revision", help="repository git revision (default: HEAD)"
    )
    mutex_group1.add_argument(
        "--pipeline-url",
        help="URL of the pipeline to use, instead of auto-detecting it.",
    )
    mutex_group1.add_argument(
        "--mr",
        type=int,
        help="ID of a merge request; the latest pipeline in that MR will be used.",
    )

    args = parser.parse_args()

    # argparse doesn't support groups inside add_mutually_exclusive_group(),
    # which means we can't just put `--project` and `--rev` in a group together,
    # we have to do this by hand instead.
    if args.pipeline_url and args.project != parser.get_default("project"):
        # weird phrasing but it's the error add_mutually_exclusive_group() gives
        parser.error("argument --project: not allowed with argument --pipeline-url")

    return args


def print_detected_jobs(
    target_dep_dag: "Dag", dependency_jobs: Iterable[str], target_jobs: Iterable[str]
) -> None:
    def print_job_set(color: str, kind: str, job_set: Iterable[str]):
        print(
            color + f"Running {len(job_set)} {kind} jobs: ",
            "\n",
            ", ".join(sorted(job_set)),
            Fore.RESET,
            "\n",
        )

    print(Fore.YELLOW + "Detected target job and its dependencies:", "\n")
    print_dag(target_dep_dag)
    print_job_set(Fore.MAGENTA, "dependency", dependency_jobs)
    print_job_set(Fore.BLUE, "target", target_jobs)


def find_dependencies(token: str | None,
                      target_jobs_regex: re.Pattern,
                      project_path: gitlab.v4.objects.Project,
                      iid: int) -> set[str]:
    """
    Find the dependencies of the target jobs in a GitLab pipeline.

    This function uses the GitLab GraphQL API to fetch the job dependency graph
    of a pipeline, filters the graph to only include the target jobs and their
    dependencies, and returns the names of these jobs.

    Args:
        token (str | None): The GitLab API token. If None, the API is accessed without
            authentication.
        target_jobs_regex (re.Pattern): A regex pattern to match the names of the target jobs.
        project_path (gitlab.v4.objects.Project): The GitLab project; its
            path_with_namespace is used in the GraphQL query.
        iid (int): The internal ID of the pipeline.

    Returns:
        set[str]: A set of the names of the target jobs and their dependencies.

    Raises:
        SystemExit: If no target jobs are found in the pipeline.
    """
    gql_instance = GitlabGQL(token=token)
    dag = create_job_needs_dag(
        gql_instance, {"projectPath": project_path.path_with_namespace, "iid": iid}
    )

    target_dep_dag = filter_dag(dag, target_jobs_regex)
    if not target_dep_dag:
        print(Fore.RED + "The job(s) were not found in the pipeline." + Fore.RESET)
        sys.exit(1)

    dependency_jobs = set(chain.from_iterable(d["needs"] for d in target_dep_dag.values()))
    target_jobs = set(target_dep_dag.keys())
    print_detected_jobs(target_dep_dag, dependency_jobs, target_jobs)
    return target_jobs.union(dependency_jobs)


if __name__ == "__main__":
    try:
        t_start = time.perf_counter()

        args = parse_args()

        token = read_token(args.token)

        gl = gitlab.Gitlab(url=GITLAB_URL,
                           private_token=token,
                           retry_transient_errors=True)

        REV: str = args.rev

        if args.pipeline_url:
            pipe, cur_project = get_gitlab_pipeline_from_url(gl, args.pipeline_url)
            REV = pipe.sha
        else:
            mesa_project = gl.projects.get("mesa/mesa")
            projects = [mesa_project]
            if args.mr:
                REV = mesa_project.mergerequests.get(args.mr).sha
            else:
                REV = check_output(['git', 'rev-parse', REV]).decode('ascii').strip()

                if args.rev == 'HEAD':
                    branch_name = check_output([
                        'git', 'symbolic-ref', '-q', 'HEAD',
                    ]).decode('ascii').strip()

                    tracked_remote = check_output([
                        'git', 'for-each-ref', '--format=%(upstream)',
                        branch_name,
                    ]).decode('ascii').strip()

                    remote_rev = check_output([
                        'git', 'rev-parse', tracked_remote,
                    ]).decode('ascii').strip()

                    if REV != remote_rev:
                        print(
                            f"Local HEAD commit {REV[:10]} is different from "
                            f"tracked remote HEAD commit {remote_rev[:10]}"
                        )
                        print("Did you forget to `git push`?")

            projects.append(get_gitlab_project(gl, args.project))
            (pipe, cur_project) = wait_for_pipeline(projects, REV)

        print(f"Revision: {REV}")
        print(f"Pipeline: {pipe.web_url}")
        target = '|'.join(args.target)
        target_jobs_regex = re.compile(target.strip())

        deps = set()
        print("🞋 job: " + Fore.BLUE + target + Style.RESET_ALL)
        deps = find_dependencies(
            token=token,
            target_jobs_regex=target_jobs_regex,
            iid=pipe.iid,
            project_path=cur_project
        )
        target_job_id, ret = monitor_pipeline(
            cur_project, pipe, target_jobs_regex, deps, args.force_manual, args.stress
        )

        if target_job_id:
            print_log(cur_project, target_job_id)

        t_end = time.perf_counter()
        spent_minutes = (t_end - t_start) / 60
        print(f"⏲ Duration of script execution: {spent_minutes:0.1f} minutes")

        sys.exit(ret)
    except KeyboardInterrupt:
        sys.exit(1)