ci/bin: Cache GQL queries

To avoid abusing the fd.o GitLab instance, let's cache API queries that
should return the same response. This will also speed up the client
tools.

Signed-off-by: Guilherme Gallo <guilherme.gallo@collabora.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/17791>
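
The change boils down to memoizing GitlabGQL.query on disk so that repeated queries with identical arguments are answered from a local cache instead of hitting the GitLab API again. A minimal sketch of that technique, assuming only the filecache decorator pieces that appear in the diff below (the fetch_pipeline function and its URL argument are made up for illustration):

from filecache import DAY, filecache
import urllib.request


@filecache(DAY)  # results are persisted on disk, keyed by the arguments, for up to a day
def fetch_pipeline(url: str) -> bytes:
    # Only the first call with a given url within a day reaches the network;
    # subsequent calls replay the cached response.
    with urllib.request.urlopen(url) as response:
        return response.read()


def drop_cache() -> None:
    # filecache exposes its backing store on the decorated function; clearing it
    # forces the next call to refetch (the same pattern invalidate_query_cache uses).
    fetch_pipeline._db.clear()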
Guilherme Gallo
2022-08-02 18:06:34 -03:00
committed by Marge Bot
parent 101697ceb3
commit 266e3a627f
2 changed files with 14 additions and 4 deletions

@@ -8,6 +8,7 @@ from pathlib import Path
 from typing import Any, Iterable, Optional, Pattern, Union
 
 import yaml
+from filecache import DAY, filecache
 from gql import Client, gql
 from gql.transport.aiohttp import AIOHTTPTransport
 from graphql import DocumentNode
@@ -57,6 +58,7 @@ class GitlabGQL:
             transport=self._transport, fetch_schema_from_transport=True
         )
 
+    @filecache(DAY)
     def query(
         self, gql_file: Union[Path, str], params: dict[str, Any]
     ) -> dict[str, Any]:
@@ -72,6 +74,9 @@
         # Execute the query on the transport
         return self.client.execute(query, variable_values=params)
 
+    def invalidate_query_cache(self):
+        self.query._db.clear()
+
 
 def create_job_needs_dag(
     gl_gql: GitlabGQL, params
@@ -121,12 +126,16 @@ def fetch_merged_yaml(gl_gql: GitlabGQL, params) -> dict[Any]:
     content = Path(gitlab_yml_file).read_text()
     params["content"] = content
     raw_response = gl_gql.query("job_details.gql", params)
-    merged_yaml = raw_response["ciConfig"]["mergedYaml"]
-    assert merged_yaml, """
+    if merged_yaml := raw_response["ciConfig"]["mergedYaml"]:
+        return yaml.safe_load(merged_yaml)
+
+    gl_gql.invalidate_query_cache()
+    raise ValueError(
+        """
 Could not fetch any content for merged YAML,
 please verify if the git SHA exists in remote.
-Maybe you forgot to `git push`?"""
-    return yaml.safe_load(merged_yaml)
+Maybe you forgot to `git push`? """
+    )
 
 
 def recursive_fill(job, relationship_field, target_data, acc_data: dict, merged_yaml):
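
One side effect of caching responses for a day is that a failed lookup (for example, querying a SHA that was never pushed) would otherwise be replayed from disk on every retry; that is why the error path above clears the cache before raising. A hedged usage sketch, assuming GitlabGQL can be constructed with its defaults and using illustrative parameter values that are not taken from this hunk:

gl = GitlabGQL()
try:
    merged = fetch_merged_yaml(gl, {"projectPath": "mesa/mesa", "sha": "deadbeef"})
except ValueError:
    # The empty response was evicted from the on-disk cache, so retrying after
    # a `git push` queries GitLab again instead of replaying the cached miss.
    raise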

@@ -1,4 +1,5 @@
 colorama==0.4.5
+filecache==0.81
 gql==3.4.0
 python-gitlab==3.5.0
 ruamel.yaml.clib==0.2.6