ci/bin: Use iid instead of SHA in gitlab_gql

We were using the sha to fetch the pipeline from GraphQL, but that
leads to the wrong result when both MR and branch pipelines exist for
the same commit. For example, passing the MR pipeline as pipeline-url:
- https://gitlab.freedesktop.org/gallo/mesa/-/pipelines/1017182
would end up fetching the branch pipeline instead:
- https://gitlab.freedesktop.org/gallo/mesa/-/pipelines/1013189
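
A minimal sketch of the iid-based lookup, assuming a direct request to
GitLab's GraphQL endpoint rather than the script's own GitlabGQL wrapper
(the function name, token handling, and trimmed-down query are
illustrative, not the project's code):

import requests

PIPELINE_QUERY = """
query getPipelineDetails($projectPath: ID!, $iid: ID!) {
  project(fullPath: $projectPath) {
    pipeline(iid: $iid) {
      id
      iid
      complete
    }
  }
}
"""


def fetch_pipeline_by_iid(token: str, project_path: str, iid: int) -> dict:
    # iid is scoped to the project, so it resolves to exactly one pipeline,
    # unlike a sha that may be shared by MR and branch pipelines.
    response = requests.post(
        "https://gitlab.freedesktop.org/api/graphql",
        json={
            "query": PIPELINE_QUERY,
            "variables": {"projectPath": project_path, "iid": str(iid)},
        },
        headers={"Authorization": f"Bearer {token}"},
        timeout=60,
    )
    response.raise_for_status()
    return response.json()["data"]["project"]["pipeline"]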

Also simplify the GQL query; it was fetching lots of unused data.

Signed-off-by: Guilherme Gallo <guilherme.gallo@collabora.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/25858>
Author:       Guilherme Gallo
Date:         2023-10-24 00:08:37 -03:00
Committed by: Marge Bot
Parent:       49b3118302
Commit:       c7b67d8619
2 changed files with 7 additions and 62 deletions

@@ -14,13 +14,13 @@ and show the job(s) logs.
 
 import argparse
 import re
-from subprocess import check_output
 import sys
 import time
 from collections import defaultdict
 from concurrent.futures import ThreadPoolExecutor
 from functools import partial
 from itertools import chain
+from subprocess import check_output
 from typing import Literal, Optional
 
 import gitlab
@@ -295,10 +295,10 @@ def parse_args() -> None:
     return args
 
 
-def find_dependencies(target_job: str, project_path: str, sha: str) -> set[str]:
+def find_dependencies(target_job: str, project_path: str, iid: int) -> set[str]:
     gql_instance = GitlabGQL()
     dag, _ = create_job_needs_dag(
-        gql_instance, {"projectPath": project_path.path_with_namespace, "sha": sha}
+        gql_instance, {"projectPath": project_path.path_with_namespace, "iid": iid}
     )
 
     target_dep_dag = filter_dag(dag, target_job)
@@ -352,7 +352,7 @@ if __name__ == "__main__":
         if args.target:
             print("🞋 job: " + Fore.BLUE + args.target + Style.RESET_ALL)
             deps = find_dependencies(
-                target_job=args.target, sha=REV, project_path=cur_project
+                target_job=args.target, iid=pipe.iid, project_path=cur_project
            )
         target_job_id, ret = monitor_pipeline(
             cur_project, pipe, args.target, deps, args.force_manual, args.stress
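
For reference, a minimal sketch of where a project-scoped iid can come
from on the REST side with python-gitlab (the hard-coded project path
and pipeline id are placeholders, and this is not the script's actual
pipeline lookup):

import gitlab  # python-gitlab

gl = gitlab.Gitlab("https://gitlab.freedesktop.org")
project = gl.projects.get("mesa/mesa")

# A REST pipeline object carries both the instance-wide `id` and the
# project-scoped `iid`; the latter is what the GraphQL query expects.
pipe = project.pipelines.get(1017182)
print(pipe.id, pipe.iid)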

@@ -1,74 +1,19 @@
-fragment LinkedPipelineData on Pipeline {
-  id
-  iid
-  path
-  cancelable
-  retryable
-  userPermissions {
-    updatePipeline
-  }
-  status: detailedStatus {
-    id
-    group
-    label
-    icon
-  }
-  sourceJob {
-    id
-    name
-  }
-  project {
-    id
-    name
-    fullPath
-  }
-}
-
-query getPipelineDetails($projectPath: ID!, $sha: String!) {
+query getPipelineDetails($projectPath: ID!, $iid: ID!) {
   project(fullPath: $projectPath) {
     id
-    pipeline(sha: $sha) {
+    pipeline(iid: $iid) {
       id
       iid
       complete
-      downstream {
-        nodes {
-          ...LinkedPipelineData
-        }
-      }
-      upstream {
-        ...LinkedPipelineData
-      }
       stages {
         nodes {
-          id
-          name
-          status: detailedStatus {
-            id
-            action {
-              id
-              icon
-              path
-              title
-            }
-          }
+          name,
           groups {
             nodes {
-              id
-              status: detailedStatus {
-                id
-                label
-                group
-                icon
-              }
-              name
-              size
               jobs {
                 nodes {
                   id
                   name
-                  kind
-                  scheduledAt
                   needs {
                     nodes {
                       id