-
Notifications
You must be signed in to change notification settings - Fork 1
105 lines (91 loc) · 3.67 KB
/
ci.yml
File metadata and controls
105 lines (91 loc) · 3.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
---
# CI for the echosounder pipeline:
#   * test      – unit tests against a CPU-only torch install
#   * pipeline  – end-to-end smoke test of the docker-compose stack
#                 (RAW ingest -> preprocessing -> inference)
name: compose-ci

on: [push, pull_request]

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: true

      - uses: actions/setup-python@v5
        with:
          # Quoted so YAML does not read 3.11 as the float 3.11
          python-version: "3.11"

      - name: Install test dependencies
        run: |
          pip install torch torchvision --index-url https://download.pytorch.org/whl/cpu
          pip install pytest xarray netCDF4 numpy matplotlib scikit-learn pillow

      # Create a dummy checkpoint so setup_device_and_model hits the except
      # branch and downloads DINO weights from Facebook's hub instead.
      - name: Create dummy checkpoint for DINO hub fallback
        run: touch checkpoint.pth

      - name: Run tests
        run: pytest tests/ -v

  pipeline:
    runs-on: ubuntu-latest
    # Backstop so a hung Docker build/pull cannot occupy a runner
    # for the default 6 hours; the polling step below bounds the
    # inference wait separately at 5 minutes.
    timeout-minutes: 30
    steps:
      #--------------------------------------------------------------
      # 1. Checkout (include submodules such as raw_consumer/pyEcholab)
      #--------------------------------------------------------------
      - name: Checkout repository (with submodules)
        uses: actions/checkout@v4
        with:
          submodules: recursive
          fetch-depth: 0

      #--------------------------------------------------------------
      # 2. Pull a sample .raw file so the pipeline has input, and
      #    pre-create the output directories so the polling step's
      #    `find` never errors on a missing path.
      #--------------------------------------------------------------
      - name: Download test data
        run: |
          mkdir -p data/input data/preprocessing data/inference
          aws s3 cp --no-sign-request \
            "s3://noaa-wcsd-pds/data/raw/Bell_M._Shimada/SH2306/EK80/Hake-D20230811-T165727.raw" \
            data/input/
          touch inference/checkpoint.pth

      #--------------------------------------------------------------
      # 3. Build images & start RAW -> PRE -> INFER stack detached
      #--------------------------------------------------------------
      - name: Start Docker pipeline (detached)
        run: |
          docker compose -f docker-compose.yml up --build -d \
            raw preprocessing infer

      #--------------------------------------------------------------
      # 4. Poll for inference output (max 5 min)
      #--------------------------------------------------------------
      - name: Wait for inference output (max 5 min)
        run: |
          end=$((SECONDS+300))
          echo "Polling for PNGs in data/inference ..."
          while [ $SECONDS -lt $end ]; do
            cnt=$(find data/inference -type f -name '*.png' | wc -l)
            if [ "$cnt" -gt 0 ]; then
              echo "✅ Found $cnt PNG(s) – pipeline succeeded."
              exit 0
            fi
            sleep 10
          done
          echo "❌ No PNGs produced in 5 minutes."
          docker compose -f docker-compose.yml logs --tail 50
          exit 1

      #--------------------------------------------------------------
      # 5. Upload all generated images as artifacts
      #--------------------------------------------------------------
      # NOTE: the preprocessing stage emits JPGs, not PNGs; the
      # artifact name is kept as-is for backward compatibility with
      # anything downloading "preprocessing-pngs".
      - name: Upload preprocessing JPGs
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: preprocessing-pngs
          path: data/preprocessing/**/*.jpg
          retention-days: 7

      - name: Upload inference PNGs
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: inference-pngs
          path: data/inference/**/*.png
          retention-days: 7

      #--------------------------------------------------------------
      # 6. Tear down containers & volumes (always runs)
      #--------------------------------------------------------------
      - name: Teardown
        if: always()
        run: docker compose -f docker-compose.yml down -v