Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/regenerate_scheduled.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ jobs:
run: |
sudo apt-get -q update
sudo apt-get -q upgrade
sudo apt-get -q install binutils wget tar xmlstarlet podman python3-requests
sudo apt-get -q install binutils wget tar xmlstarlet podman python3-requests python3-gitlab
podman pull ghcr.io/gtk-rs/gtk4-rs/gtk4:latest
podman run --rm -v "$(pwd)":/app -w /app ghcr.io/gtk-rs/gtk4-rs/gtk4:latest python3 dl.py
./dl-win32.sh
Expand Down
153 changes: 51 additions & 102 deletions dl-gtk-macos.py
Original file line number Diff line number Diff line change
@@ -1,91 +1,29 @@
import requests
import os
import zipfile
import shutil
import gitlab

GITLAB_URL = "https://gitlab.gnome.org/"
PERSONAL_ACCESS_TOKEN = os.environ.get("GITLAB_KEY")
PROJECT_ID = "665" # The GNOME/GTK project
GITLAB_URL = "https://gitlab.gnome.org"
PROJECT_ID = 665 # GNOME/GTK project
JOB_NAME = "macos: [macosarm]"
ARTIFACT_NAME = "artifacts.zip"
SAVE_PATH = "./"
TARGET_FILE = "_build/gtk/GdkMacos-4.0.gir"

PIPELINES_URL = f"{GITLAB_URL}/api/v4/projects/{PROJECT_ID}/pipelines"
HEADERS = {"Authorization": f"Bearer {PERSONAL_ACCESS_TOKEN}"}


def http_get(url, *, expect_json=True, **kwargs):
    """Issue a GET request, print diagnostics, and optionally decode JSON.

    Extra keyword arguments are forwarded to ``requests.get`` unchanged.
    Raises ``requests.HTTPError`` for a non-2xx status, and ``ValueError``
    when JSON was expected but the Content-Type header says otherwise.
    Returns the parsed JSON body, or the raw response when
    ``expect_json`` is False.
    """
    resp = requests.get(url, **kwargs)
    ctype = resp.headers.get("Content-Type", "")

    # Trace every request — handy when reading CI logs after a failure.
    print(f"\nGET {url}")
    print(f"Status: {resp.status_code}")
    print(f"Content-Type: {ctype}")

    snippet = resp.text[:500]
    if not snippet:
        print("Body preview: <empty>")
    else:
        print("Body preview:")
        print(snippet)

    resp.raise_for_status()

    if not expect_json:
        return resp

    if "application/json" not in ctype:
        raise ValueError(f"Expected JSON response, got '{ctype}'")
    return resp.json()


def get_job_with_artifact(pipeline_id, job_name):
    """Return the job named *job_name* from pipeline *pipeline_id*, or None.

    Queries the GitLab jobs endpoint for the pipeline and matches on the
    job's name only (artifact presence is not checked here — the caller
    verifies the job status before downloading).  Network or decoding
    errors are reported and swallowed, returning None.
    """
    url = f"{GITLAB_URL}/api/v4/projects/{PROJECT_ID}/pipelines/{pipeline_id}/jobs"

    try:
        jobs = http_get(url, headers=HEADERS)

        for job in jobs:
            if job["name"] == job_name:
                return job

        # Bug fix: the old message claimed no job "with artifact
        # ARTIFACT_NAME" was found, but this loop only matches the job
        # name — report the condition that was actually tested.
        print(
            f"No job named '{job_name}' found in pipeline {pipeline_id}."
        )
        return None

    except (requests.exceptions.RequestException, ValueError) as e:
        print(f"Error fetching jobs: {e}")
        return None


def download_artifact(job_id, save_path):
url = f"{GITLAB_URL}/api/v4/projects/{PROJECT_ID}/jobs/{job_id}/artifacts"

try:
response = http_get(
url,
headers=HEADERS,
stream=True,
expect_json=False,
)
PERSONAL_ACCESS_TOKEN = os.environ.get("GITLAB_KEY")
if not PERSONAL_ACCESS_TOKEN:
raise RuntimeError("GITLAB_KEY environment variable is not set")

artifact_path = os.path.join(save_path, ARTIFACT_NAME)
with open(artifact_path, "wb") as file:
for chunk in response.iter_content(chunk_size=1024):
if chunk:
file.write(chunk)

print(f"Artifact saved to: {artifact_path}")
return artifact_path
def download_and_extract_artifact(job, target_file, destination_path):
artifact_path = os.path.join(SAVE_PATH, ARTIFACT_NAME)

except requests.exceptions.RequestException as e:
print(f"Error downloading artifact: {e}")
return None
# Download artifacts archive
with open(artifact_path, "wb") as f:
job.artifacts(streamed=True, action=f.write)

print(f"Artifact saved to: {artifact_path}")

def extract_and_copy_file(artifact_path, target_file, destination_path):
try:
temp_dir = os.path.join(os.path.dirname(artifact_path), "temp_extracted")
os.makedirs(temp_dir, exist_ok=True)
Expand All @@ -100,42 +38,53 @@ def extract_and_copy_file(artifact_path, target_file, destination_path):
else:
print(f"File '{target_file}' not found in the extracted contents.")

shutil.rmtree(temp_dir)
os.remove(artifact_path)
finally:
shutil.rmtree(temp_dir, ignore_errors=True)
if os.path.exists(artifact_path):
os.remove(artifact_path)
print("Temporary files cleaned up.")

except Exception as e:
print(f"Error extracting or copying file: {e}")


def main():
try:
pipelines = http_get(
PIPELINES_URL,
headers=HEADERS,
params={
"order_by": "updated_at",
"sort": "desc",
},
)

if not pipelines:
exit("No pipelines found.")

for pipeline in pipelines:
job = get_job_with_artifact(pipeline["id"], JOB_NAME)
if not job or job["status"] != "success":
gl = gitlab.Gitlab(
GITLAB_URL,
private_token=PERSONAL_ACCESS_TOKEN,
)

project = gl.projects.get(PROJECT_ID)

pipelines = project.pipelines.list(
order_by="updated_at",
sort="desc",
per_page=20,
)

if not pipelines:
raise RuntimeError("No pipelines found")

for pipeline in pipelines:
pipeline = project.pipelines.get(pipeline.id)

jobs = pipeline.jobs.list(all=True)

for job in jobs:
if job.name != JOB_NAME:
continue
if job.status != "success":
continue
if not job.artifacts_file:
continue

print(f"Job '{job['name']}' succeeded (Job ID: {job['id']}).")
print(f"Job '{job.name}' succeeded (Job ID: {job.id}).")

artifact_path = download_artifact(job["id"], SAVE_PATH)
if artifact_path:
extract_and_copy_file(artifact_path, TARGET_FILE, "./")
break
download_and_extract_artifact(
job,
TARGET_FILE,
"./",
)
return

except (requests.exceptions.RequestException, ValueError) as e:
exit(f"Error fetching pipelines: {e}")
print("No successful job with artifacts found.")


if __name__ == "__main__":
Expand Down
Loading