Skip to content

Commit 1de8a8d

Browse files
Merge pull request #11 from hotosm/feature/zenml
Feature: ZenML and 3.0 architecture with an example local setup
2 parents e0809a6 + 92baaa1 commit 1de8a8d

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

44 files changed

+7127
-1
lines changed

.github/scripts/detect_models.py

Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
"""Detect models with Dockerfiles for CI/CD."""
2+
3+
import json
4+
import os
5+
import subprocess
6+
from pathlib import Path
7+
from typing import TypedDict
8+
9+
import pystac
10+
11+
12+
class ModelInfo(TypedDict):
    """Metadata for one buildable model; one entry in the CI build matrix."""

    # Model directory name under models/ (used by CI as the image name).
    name: str
    # Version string read from the model's stac-item.json "version" property.
    version: str
    # Repository-relative path to the model directory, e.g. "models/unet".
    path: str
16+
17+
18+
def get_changed_models() -> set[str] | None:
19+
if os.getenv("EVENT_NAME") != "pull_request":
20+
return None
21+
base_sha = os.getenv("BASE_SHA")
22+
head_sha = os.getenv("HEAD_SHA")
23+
if not base_sha or not head_sha:
24+
return None
25+
try:
26+
result = subprocess.run(
27+
["git", "diff", "--name-only", base_sha, head_sha],
28+
capture_output=True,
29+
text=True,
30+
check=True,
31+
)
32+
return {
33+
Path(f).parts[0] + "/" + Path(f).parts[1]
34+
for f in result.stdout.splitlines()
35+
if f.startswith("models/") and len(Path(f).parts) > 1
36+
}
37+
except subprocess.CalledProcessError:
38+
return None
39+
40+
41+
def find_models() -> list[ModelInfo]:
    """Discover buildable models under ``models/``.

    A model is any direct subdirectory of ``models/`` that contains a
    Dockerfile. On pull requests the result is narrowed to directories the
    PR actually touched; ``None`` from :func:`get_changed_models` means
    "no filtering" (push / workflow_dispatch events).

    Returns:
        One ``ModelInfo`` per buildable model, sorted by directory name.

    Raises:
        FileNotFoundError: if a model directory is missing stac-item.json,
            which is required to resolve the image version.
    """
    models_dir = Path("models")
    if not models_dir.exists():
        return []

    changed = get_changed_models()
    models: list[ModelInfo] = []

    for model_path in sorted(models_dir.iterdir()):
        if not model_path.is_dir() or not (model_path / "Dockerfile").exists():
            continue
        # BUGFIX: compare against None explicitly. The previous
        # `if changed and ...` treated an *empty* change set the same as
        # "no filtering" and rebuilt every model; an empty set must build
        # nothing.
        if changed is not None and str(model_path) not in changed:
            continue

        stac_file = model_path / "stac-item.json"
        if not stac_file.exists():
            raise FileNotFoundError(f"Missing stac-item.json for {model_path.name}")

        item = pystac.Item.from_file(str(stac_file))
        version = item.properties["version"]
        models.append({"name": model_path.name, "version": version, "path": str(model_path)})

    return models
64+
65+
66+
if __name__ == "__main__":
    # Emit the CI build matrix for $GITHUB_OUTPUT.
    # BUGFIX: use compact separators. The workflow guards the build job with
    # `matrix != '{"include":[]}'`, but json.dumps' default output is
    # '{"include": []}' (note the space), so the guard never matched and an
    # empty matrix still reached (and failed) the build job.
    print(json.dumps({"include": find_models()}, separators=(",", ":")))
Lines changed: 76 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,76 @@
1+
# Build and push a Docker image for every model under models/ that has a
# Dockerfile. A detect job computes a JSON build matrix (narrowed to the
# changed models on pull requests); the build job fans out over that matrix.
name: Build Model Images

on:
  pull_request:
    paths: ["models/**"]
  push:
    branches: [master]
    paths: ["models/**"]
  workflow_dispatch:

env:
  REGISTRY: ghcr.io

# Cancel superseded runs of the same branch/PR.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  detect:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.models.outputs.matrix }}
    steps:
      - uses: actions/checkout@v4
        with:
          # Full history so git diff between base and head SHAs works.
          fetch-depth: 0
      - name: Fetch base ref
        if: github.event_name == 'pull_request'
        run: git fetch origin ${{ github.event.pull_request.base.ref }}
      - uses: actions/setup-python@v5
        with:
          python-version: "3.13"
      - run: pip install pystac
      - id: models
        # detect_models.py prints {"include": [...]} consumed by the matrix.
        run: echo "matrix=$(python .github/scripts/detect_models.py)" >> $GITHUB_OUTPUT
        env:
          EVENT_NAME: ${{ github.event_name }}
          BASE_SHA: ${{ github.event.pull_request.base.sha }}
          HEAD_SHA: ${{ github.sha }}

  build:
    needs: detect
    # Skip when no models changed.
    # NOTE(review): this string comparison only matches if detect_models.py
    # emits *compact* JSON ('{"include":[]}', no spaces) — json.dumps'
    # default output contains a space after the colon; verify the script.
    if: needs.detect.outputs.matrix != '{"include":[]}'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write  # required to push images to GHCR
    strategy:
      matrix: ${{ fromJson(needs.detect.outputs.matrix) }}
    steps:
      - uses: actions/checkout@v4
      - uses: docker/setup-buildx-action@v3
      - uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - uses: docker/metadata-action@v5
        id: meta
        with:
          images: ${{ env.REGISTRY }}/${{ github.repository_owner }}/fair-models/${{ matrix.name }}
          # PRs get a pr-<number> tag; master gets v<version> and latest.
          tags: |
            type=ref,event=pr,prefix=pr-
            type=raw,value=v${{ matrix.version }},enable={{is_default_branch}}
            type=raw,value=latest,enable={{is_default_branch}}

      - uses: docker/build-push-action@v6
        with:
          # Build from the repo root so shared files are in the context.
          context: .
          file: ${{ matrix.path }}/Dockerfile
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          # Per-model GitHub Actions layer cache.
          cache-from: type=gha,scope=${{ matrix.name }}
          cache-to: type=gha,mode=max,scope=${{ matrix.name }}

.github/workflows/build-zenml.yml

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
# Integration test: spin up a local ZenML server, run the example UNet
# pipeline end-to-end, and verify it produced the expected STAC catalog,
# artifacts, and prediction files.
name: build & integration of zenml workflow
on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
defaults:
  run:
    shell: bash
# Cancel superseded runs of the same branch/PR.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  example:
    name: example workflow
    runs-on: ubuntu-latest
    steps:
      - name: Clone repo
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up uv
        uses: astral-sh/setup-uv@v5
        with:
          python-version: "3.13"
          enable-cache: true
      - name: Install dependencies
        run: uv sync --group example --group local
      - name: Start ZenML server
        # Server runs in the background; the sleep gives it time to come up
        # before status is checked.
        run: |
          uv run zenml init
          uv run zenml up --blocking &
          sleep 10
          uv run zenml status
      - name: Run example workflow
        run: uv run python examples/unet/run.py all
      - name: Verify artifacts
        # Each check fails the job with a descriptive message if an expected
        # output of the pipeline is missing.
        run: |
          test -d stac_catalog || { echo "STAC catalog not created"; exit 1; }
          test -f stac_catalog/base-models/example-unet/example-unet.json || { echo "Base model not registered"; exit 1; }
          test -f stac_catalog/datasets/buildings-banepa/buildings-banepa.json || { echo "Dataset not registered"; exit 1; }
          ls -1 stac_catalog/local-models/ | grep -q "example-unet-finetuned-banepa-v" || { echo "Local model not published"; exit 1; }
          test -d artifacts || { echo "Artifacts directory not created"; exit 1; }
          test -d data/sample/predict/predictions || { echo "Predictions directory not created"; exit 1; }
          test -n "$(find data/sample/predict/predictions -name '*.tif' -o -name '*.png' 2>/dev/null)" || { echo "No prediction files found"; exit 1; }
          echo "All workflow outputs verified: STAC catalog + artifacts + predictions"

.github/workflows/publish-pypi.yml

Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
# Publish the package to PyPI when a GitHub release is published.
# Pipeline: build (sdist + wheel) -> test (install wheel, import check)
# -> publish (PyPI trusted publishing via OIDC; no stored API token).
name: Publish to PyPI

on:
  release:
    types:
      - published

permissions:
  contents: read

jobs:
  build:
    name: build
    runs-on: ubuntu-latest
    steps:
      - name: Clone repo
        uses: actions/checkout@v4
        with:
          # Full history — presumably for tag-derived versioning; TODO confirm.
          fetch-depth: 0
      - name: Set up uv
        uses: astral-sh/setup-uv@v5
        with:
          python-version: "3.13"
          enable-cache: true
      - name: Build sdist and wheel
        run: uv build
      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: dist
          path: dist/

  test:
    name: test artifacts
    needs: build
    runs-on: ubuntu-latest
    steps:
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          name: dist
          path: dist/
      - name: Set up uv
        uses: astral-sh/setup-uv@v5
        with:
          python-version: "3.13"
      - name: Install from wheel and verify version
        # Smoke test: the built wheel installs cleanly and the package imports.
        run: |
          uv venv /tmp/test-env
          VIRTUAL_ENV=/tmp/test-env uv pip install dist/*.whl
          /tmp/test-env/bin/python -c "import fair; print(fair.__version__)"

  publish:
    name: publish (PyPI)
    needs: [build, test]
    runs-on: ubuntu-latest
    # Environment gate + OIDC id-token enable PyPI trusted publishing.
    environment: pypi
    permissions:
      id-token: write
    steps:
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          name: dist
          path: dist/
      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
1+
# Publish the package to TestPyPI on every v* tag push — a dry run of the
# PyPI release pipeline: build (sdist + wheel) -> test (install wheel,
# import check) -> publish (trusted publishing via OIDC).
name: Publish to TestPyPI

on:
  push:
    tags:
      - "v*"

permissions:
  contents: read

jobs:
  build:
    name: build
    runs-on: ubuntu-latest
    steps:
      - name: Clone repo
        uses: actions/checkout@v4
        with:
          # Full history — presumably for tag-derived versioning; TODO confirm.
          fetch-depth: 0
      - name: Set up uv
        uses: astral-sh/setup-uv@v5
        with:
          python-version: "3.13"
          enable-cache: true
      - name: Build sdist and wheel
        run: uv build
      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: dist
          path: dist/

  test:
    name: test artifacts
    needs: build
    runs-on: ubuntu-latest
    steps:
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          name: dist
          path: dist/
      - name: Set up uv
        uses: astral-sh/setup-uv@v5
        with:
          python-version: "3.13"
      - name: Install from wheel and verify version
        # Smoke test: the built wheel installs cleanly and the package imports.
        run: |
          uv venv /tmp/test-env
          VIRTUAL_ENV=/tmp/test-env uv pip install dist/*.whl
          /tmp/test-env/bin/python -c "import fair; print(fair.__version__)"

  publish:
    name: publish (TestPyPI)
    needs: [build, test]
    runs-on: ubuntu-latest
    # Environment gate + OIDC id-token enable trusted publishing.
    environment: testpypi
    permissions:
      id-token: write
    steps:
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          name: dist
          path: dist/
      - name: Publish to TestPyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          # Upload to the TestPyPI index instead of the default PyPI.
          repository-url: https://test.pypi.org/legacy/

0 commit comments

Comments
 (0)