Full parallel #3145
name: Docs

concurrency:
  group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.ref }}
  cancel-in-progress: true
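# One group per PR (event.number) or per pushed ref (event.ref); a newer run
# in the same group cancels the older one. For pushes event.number is empty,
# for pull requests event.ref is.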

on:
  push:
    branches: [ master, develop ]
  pull_request:
    branches: [ master, develop ]

permissions:
  contents: write
  pages: write
  id-token: write
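# contents: write lets the gh-pages action push commits; pages and id-token
# are the standard write grants for GitHub Pages deployments.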

jobs:
  build_docs:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: true
      matrix:
        os: [ ubuntu-latest ]
        python-version: [ "3.10" ]
    steps:
      - uses: actions/checkout@v4
      - name: Install uv
        uses: astral-sh/setup-uv@v5
        with:
          python-version: ${{ matrix.python-version }}
          enable-cache: true
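      # Three caching layers in this job: uv's package cache (enable-cache
      # above), the ~/mne_data dataset cache (next step), and the built HTML
      # that is handed to the deploy jobs.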
      - name: Restore MNE Data Cache
        id: cache-mne_data
        uses: actions/cache/restore@v4
        with:
          path: ~/mne_data
          key: doc-${{ runner.os }}-mne-data-v2-${{ github.run_id }}
          restore-keys: |
            doc-${{ runner.os }}-mne-data-v2-
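      # The exact key embeds run_id, so a run never exact-hits a previous
      # run's key; restore-keys falls back to the newest cache with this
      # prefix, and the save step at the end of the job uploads a fresh snapshot.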
      - name: Clean up corrupted cache
        run: |
          echo "Cache matched key: ${{ steps.cache-mne_data.outputs.cache-matched-key }}"
          mkdir -p ~/mne_data
          # Remove any incomplete extractions (e.g., BIDS.zip.unzip alongside the final folder)
          if [ -d ~/mne_data/BIDS.zip.unzip ]; then
            echo "Removing incomplete extraction: BIDS.zip.unzip"
            rm -rf ~/mne_data/BIDS.zip.unzip
          fi
          if [ -d ~/mne_data/BrainForm-BIDS-eeg-dataset ] && [ -f ~/mne_data/BrainForm-BIDS-eeg-dataset/.cache-incomplete ]; then
            echo "Removing incomplete BrainForm dataset"
            rm -rf ~/mne_data/BrainForm-BIDS-eeg-dataset
          fi
      - name: Cache docs build
        id: cache-docs
        uses: actions/cache@v4
        with:
          key: docs-build-${{ github.run_id }}-${{ github.run_attempt }}
          path: docs/build
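      # docs/build is handed to the two deploy jobs below through this cache;
      # run_id + run_attempt scopes it to exactly this run, so every re-run
      # attempt rebuilds from scratch.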
      - name: Install moabb
        run: |
          uv pip install -e ".[docs,deeplearning,optuna,external,carbonemission]"
      - name: Pre-download datasets (cold cache only)
        if: steps.cache-mne_data.outputs.cache-matched-key == ''
        run: |
          echo "Cache is cold, pre-downloading datasets with delays to avoid rate limiting..."
          python << 'EOF'
          import importlib
          import sys
          import time

          # All datasets used in examples that download from Zenodo.
          # Each entry: (module_path, class_name, subjects), where subjects
          # is an int (first N) or a list of specific subject IDs.
          datasets_to_download = [
              ('moabb.datasets', 'BNCI2014_001', 3),
              ('moabb.datasets', 'BNCI2014_009', 2),
              ('moabb.datasets', 'Zhou2016', 4),  # all 4 subjects
              ('moabb.datasets', 'AlexMI', 1),
              ('moabb.datasets', 'Kalunga2016', 2),
              ('moabb.datasets', 'Cattan2019_VR', 1),
              ('moabb.datasets', 'Hinss2021', 1),
              ('moabb.datasets', 'Dreyer2023A', [1, 5, 7, 35]),  # plot_dreyer example
          ]
          failed = []
          for module_path, class_name, subj_spec in datasets_to_download:
              if isinstance(subj_spec, list):
                  subjects_label = f"subjects {subj_spec}"
              else:
                  subjects_label = f"{subj_spec} subjects"
              print(f'\n{"="*60}')
              print(f'Pre-downloading {class_name} ({subjects_label})...')
              print(f'{"="*60}')
              success = False
              for attempt in range(5):  # up to 5 attempts per dataset
                  try:
                      module = importlib.import_module(module_path)
                      cls = getattr(module, class_name)
                      ds = cls()
                      if isinstance(subj_spec, list):
                          subjects = subj_spec
                      else:
                          subjects = ds.subject_list[:subj_spec]
                      ds.download(subject_list=subjects)
                      print(f'SUCCESS: {class_name} downloaded ({subjects_label})')
                      success = True
                      break
                  except Exception as e:
                      print(f'Attempt {attempt + 1}/5 failed: {e}')
                      if attempt < 4:
                          wait = 90 * (attempt + 1)  # 90s, 180s, 270s, 360s
                          print(f'Waiting {wait}s before retry...')
                          time.sleep(wait)
              if not success:
                  failed.append(class_name)
                  print(f'FAILED: {class_name} after 5 attempts')
              # Wait between datasets to avoid rate limiting
              print('Waiting 60s before next dataset...')
              time.sleep(60)
          if failed:
              print(f'\n{"="*60}')
              print(f'ERROR: Failed to download: {", ".join(failed)}')
              print(f'{"="*60}')
              sys.exit(1)
          else:
              print(f'\n{"="*60}')
              print('All datasets downloaded successfully!')
              print(f'{"="*60}')
          EOF
      - name: Build docs
        env:
          # Disable parallel builds when the cache is cold to avoid Zenodo rate limiting
          SPHINX_GALLERY_PARALLEL: ${{ steps.cache-mne_data.outputs.cache-matched-key != '' }}
          SPHINX_JOBS: ${{ steps.cache-mne_data.outputs.cache-matched-key != '' && 'auto' || '1' }}
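          # `cond && a || b` is the GitHub Actions expression idiom for a
          # ternary; these variables are presumably consumed by docs/conf.py
          # (Sphinx-Gallery parallelism) and the Makefile (sphinx-build -j),
          # which are not shown here.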
        run: |
          echo "Cache matched key: ${{ steps.cache-mne_data.outputs.cache-matched-key }}"
          echo "Parallel gallery builds: $SPHINX_GALLERY_PARALLEL"
          echo "Sphinx jobs: $SPHINX_JOBS"
          # Pre-create the .mne directory to avoid a race condition when parallel
          # gallery examples call mne.set_config() simultaneously
          mkdir -p ~/.mne
          cd docs && make html
      - name: Generate notebooks from examples (Colab)
        run: |
          set -euo pipefail
          echo "Converting Python examples to notebooks in docs/build/html/auto_examples/_notebooks..."
          mkdir -p docs/build/html/auto_examples/_notebooks
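          # Mirror the examples/ tree so every script gets a matching .ipynb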
          find examples -type f -name '*.py' | while read -r f; do
            rel="${f#examples/}"
            out_dir="docs/build/html/auto_examples/_notebooks/$(dirname "$rel")"
            mkdir -p "$out_dir"
            base="$(basename "$rel" .py)"
            out_path="$out_dir/$base.ipynb"
            python .github/scripts/convert_to_notebook.py --input "$f" --output "$out_path"
          done
      - name: Save MNE Data Cache
        if: success()
        uses: actions/cache/save@v4
        with:
          path: ~/mne_data
          key: doc-${{ runner.os }}-mne-data-v2-${{ github.run_id }}
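      # Saved only on success, under a fresh run_id-scoped key, so a broken
      # run never becomes the newest restore-keys candidate.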
      # Create an artifact of the HTML output.
      - uses: actions/upload-artifact@v4
        with:
          name: DocumentationHTML
          path: docs/build/html/

  deploy_neurotechx:
    if: ${{ github.ref == 'refs/heads/develop' }}
    needs: build_docs
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ ubuntu-latest ]
    steps:
      - uses: actions/checkout@v4
      - name: Restore cached docs build
        id: cache-docs
        uses: actions/cache/restore@v4
        with:
          key: docs-build-${{ github.run_id }}-${{ github.run_attempt }}
          path: docs/build
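      # The built HTML travels between jobs only via this cache; bail out
      # rather than publish an empty directory if the restore missed.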
      - name: Check cache hit
        if: steps.cache-docs.outputs.cache-hit != 'true'
        run: exit 1
      - name: Deploy Neurotechx Subpage
        uses: peaceiris/actions-gh-pages@v4
        with:
          # Pushing to an external repository requires a deploy key;
          # the workflow's GITHUB_TOKEN cannot write to other repos.
          deploy_key: ${{ secrets.ACTIONS_DEPLOY_KEY }}
          external_repository: NeuroTechX/moabb.github.io
          destination_dir: docs/
          publish_branch: master
          publish_dir: ./docs/build/html
          cname: moabb.neurotechx.com

  deploy_gh_pages:
    if: ${{ github.ref == 'refs/heads/develop' }}
    needs: build_docs
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ ubuntu-latest ]
    steps:
      - uses: actions/checkout@v4
      - name: Restore cached docs build
        id: cache-docs
        uses: actions/cache/restore@v4
        with:
          key: docs-build-${{ github.run_id }}-${{ github.run_attempt }}
          path: docs/build
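      # Same cache-miss guard as in deploy_neurotechx.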
      - name: Check cache hit
        if: steps.cache-docs.outputs.cache-hit != 'true'
        run: exit 1
      - name: Deploy gh-pages
        uses: peaceiris/actions-gh-pages@v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          deploy_key: ${{ secrets.MOABB_DEPLOY_KEY_NEW }}
          destination_dir: docs/
          publish_branch: gh-pages
          publish_dir: ./docs/build/html
          # No cname here: a CNAME cannot contain a path, and the site is
          # already served at https://neurotechx.github.io/moabb/ by default.