Added connector for LoRA skip (#2430)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# BioNeMo Framework CI Workflow
#
# This workflow runs tests for BioNeMo framework sub-packages using a matrix
# strategy where each sub-package runs in its own container -- matching the
# pattern used by unit-tests-recipes.yml.
#
# TRIGGERS:
# - Push to main branch, pull-request branches, or dependabot branches
# - Merge group events (when PRs are merged via merge queue)
# - Scheduled runs (daily at 7 AM UTC)
#
# WORKFLOW OVERVIEW:
# 1. changed-files: Detects which sub-packages changed and computes the test matrix
# 2. pre-commit: Runs static code checks and linting
# 3. get-pr-labels: Retrieves PR labels for conditional job execution
# 4. run-tests / run-tests-slow / run-tests-notebooks: Per-sub-package matrix jobs
# 5. verify-tests-status: Verifies all test jobs completed successfully
name: "BioNeMo Framework CI"

on:
  push:
    branches:
      - main
      - "pull-request/[0-9]+"
      - "dependabot/**"
  merge_group:
    types: [checks_requested]
  schedule:
    - cron: "0 7 * * *" # Runs at 7 AM UTC daily (12 AM MST)

# Strict bash for every `run:` step: -x traces commands, -e exits on error,
# -u errors on unset variables, pipefail propagates failures through pipes.
defaults:
  run:
    shell: bash -x -e -u -o pipefail {0}

# Cancel in-flight runs for the same PR (or ref) when new commits arrive.
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  # Detect which sub-packages changed relative to origin/main and compute the
  # per-sub-package test matrix consumed by the run-tests* jobs below.
  changed-files:
    runs-on: ubuntu-latest
    outputs:
      any_changed: ${{ steps.changed-files.outputs.any_changed }}
      matrix: ${{ steps.matrix.outputs.matrix }}
    steps:
      - uses: actions/checkout@v4
        with:
          # Full history is required for `git merge-base` below.
          fetch-depth: 0
      - name: Get merge-base commit
        id: merge-base
        run: |
          MERGE_BASE=$(git merge-base HEAD origin/main)
          echo "merge-base=$MERGE_BASE" >> $GITHUB_OUTPUT
      - uses: step-security/changed-files@v46
        id: changed-files
        with:
          json: true
          matrix: true
          base_sha: ${{ steps.merge-base.outputs.merge-base }}
          # Report changed *directories* truncated to depth 2, so changes under
          # sub-packages/<pkg>/... surface as "sub-packages/<pkg>".
          dir_names: true
          dir_names_max_depth: 2
          files: |
            sub-packages/**
            .github/workflows/unit-tests-framework.yml
      - name: Show output
        run: |
          echo '${{ toJSON(steps.changed-files.outputs) }}'
        shell: bash
      - name: Compute test matrix from changed files
        id: matrix
        env:
          CHANGED_JSON: ${{ steps.changed-files.outputs.all_changed_files }}
          ANY_CHANGED: ${{ steps.changed-files.outputs.any_changed }}
          IS_SCHEDULE: ${{ github.event_name == 'schedule' }}
        run: |
          python3 - <<'PY'
| import json, os, tomllib | |
| # --- Parse tach.toml for the dependency graph --- | |
| with open("tach.toml", "rb") as f: | |
| tach = tomllib.load(f) | |
| # Map each module to its sub-package by checking which source root | |
| # actually contains the module directory on disk. | |
| mod_to_pkg = {} | |
| for mod in tach.get("modules", []): | |
| mod_dir = mod["path"].replace(".", "/") | |
| for sr in tach.get("source_roots", []): | |
| parts = sr.split("/") | |
| if len(parts) >= 2 and parts[0] == "sub-packages" and os.path.isdir(os.path.join(sr, mod_dir)): | |
| mod_to_pkg[mod["path"]] = parts[1] | |
| break | |
| # Build the package dependency dict from tach modules. | |
| packages = {} | |
| for mod in tach.get("modules", []): | |
| pkg = mod_to_pkg.get(mod["path"]) | |
| if pkg is None: | |
| continue | |
| deps = [] | |
| for dep_mod in mod.get("depends_on", []): | |
| dep_pkg = mod_to_pkg.get(dep_mod) | |
| if dep_pkg and dep_pkg != pkg: | |
| deps.append(dep_pkg) | |
| packages[pkg] = sorted(set(deps)) | |
| print(f"Packages from tach.toml: { {k: v for k, v in sorted(packages.items())} }") | |
| # --- Determine which packages changed --- | |
| raw = (os.environ.get("CHANGED_JSON") or "").strip() | |
| if raw: | |
| try: | |
| changed = json.loads(raw) | |
| except json.JSONDecodeError: | |
| changed = json.loads(raw.replace('\\"', '"')) | |
| else: | |
| changed = [] | |
| any_changed = os.environ.get("ANY_CHANGED", "false") == "true" | |
| is_schedule = os.environ.get("IS_SCHEDULE", "false") == "true" | |
| # Reverse dependency map: package -> downstream dependents. | |
| reverse_deps = {name: [] for name in packages} | |
| for name, deps in packages.items(): | |
| for dep in deps: | |
| reverse_deps.setdefault(dep, []).append(name) | |
| if is_schedule: | |
| to_test = set(packages.keys()) | |
| elif any_changed: | |
| infra_changed = any(not c.startswith("sub-packages/") for c in changed) | |
| if infra_changed: | |
| to_test = set(packages.keys()) | |
| else: | |
| directly_changed = set() | |
| for c in changed: | |
| parts = c.split("/") | |
| if len(parts) >= 2 and parts[0] == "sub-packages": | |
| pkg = parts[1] | |
| if pkg in packages: | |
| directly_changed.add(pkg) | |
| to_test = set(directly_changed) | |
| queue = list(directly_changed) | |
| while queue: | |
| pkg = queue.pop() | |
| for rdep in reverse_deps.get(pkg, []): | |
| if rdep not in to_test: | |
| to_test.add(rdep) | |
| queue.append(rdep) | |
| else: | |
| to_test = set() | |
| # --- Build matrix output --- | |
| matrix = [] | |
| for name in sorted(to_test): | |
| matrix.append({ | |
| "name": name, | |
| "dir": f"sub-packages/{name}", | |
| "deps": [f"sub-packages/{d}" for d in packages[name]], | |
| }) | |
| result = json.dumps(matrix) | |
| with open(os.environ["GITHUB_OUTPUT"], "a") as f: | |
| f.write(f"matrix={result}\n") | |
| print(f"Matrix ({len(matrix)} packages): {result}") | |
| PY | |
| pre-commit: | |
| runs-on: ubuntu-latest | |
| steps: | |
| - uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| - uses: actions/setup-python@v5 | |
| with: | |
| python-version: "3.13" | |
| - name: Setup UV | |
| uses: astral-sh/setup-uv@v6 | |
| with: | |
| enable-cache: true | |
| - run: | | |
| uv tool install pre-commit --with pre-commit-uv --force-reinstall | |
| uv tool install tach>=0.9.0 | |
| uv tool update-shell | |
| - run: ./ci/scripts/static_checks.sh | |
| get-pr-labels: | |
| runs-on: ubuntu-latest | |
| outputs: | |
| labels: ${{ steps.get-labels.outputs.labels || steps.get-labels-empty.outputs.labels }} | |
| steps: | |
| - name: Get PR number from branch | |
| if: startsWith(github.ref, 'refs/heads/pull-request/') | |
| id: get-pr-num | |
| run: | | |
| PR_NUM=$(echo ${{ github.ref_name }} | grep -oE '[0-9]+$') | |
| echo "pr_num=$PR_NUM" >> $GITHUB_OUTPUT | |
| - name: Get PR labels | |
| id: get-labels | |
| if: startsWith(github.ref, 'refs/heads/pull-request/') | |
| env: | |
| GH_TOKEN: ${{ github.token }} | |
| run: | | |
| LABELS=$(gh api repos/${{ github.repository }}/pulls/${{ steps.get-pr-num.outputs.pr_num }} --jq '[.labels[].name]' || echo "[]") | |
| echo "labels=$LABELS" >> $GITHUB_OUTPUT | |
| - name: Set empty labels for non-PR branches | |
| if: ${{ !startsWith(github.ref, 'refs/heads/pull-request/') }} | |
| id: get-labels-empty | |
| run: | | |
| echo "labels=[]" >> $GITHUB_OUTPUT | |
  # Fast unit tests: one GPU job per changed sub-package.
  run-tests:
    needs:
      - pre-commit
      - changed-files
      - get-pr-labels
    runs-on: linux-amd64-gpu-l4-latest-1
    # Run on schedule, or when forced via the `ciflow:all` label, or whenever
    # something changed and the PR does not carry `ciflow:skip`.
    if: |
      (github.event_name == 'schedule') ||
      contains(fromJSON(needs.get-pr-labels.outputs.labels || '[]'), 'ciflow:all') ||
      (
        !contains(fromJSON(needs.get-pr-labels.outputs.labels || '[]'), 'ciflow:skip') &&
        (needs.changed-files.outputs.any_changed == 'true')
      )
    name: "unit-tests (${{ matrix.pkg.name }})"
    container:
      image: svcbionemo023/bionemo-framework:pytorch26.03-py3-squashed
      options: --shm-size=16G
    env:
      CI: true
      HF_TOKEN: ${{ secrets.HF_TOKEN }}
      HF_HOME: /cache/huggingface
      BIONEMO_DATA_SOURCE: ngc
    strategy:
      matrix:
        # One matrix entry per sub-package, produced by the changed-files job:
        # {name, dir, deps}.
        pkg: ${{ fromJson(needs.changed-files.outputs.matrix) }}
      fail-fast: false
    steps:
      - name: Show GPU info
        run: nvidia-smi
      - name: Setup proxy cache
        uses: nv-gha-runners/setup-proxy-cache@main
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          # Only sub-packages (plus LICENSE) are needed to install and test.
          sparse-checkout: |
            sub-packages
            LICENSE
          sparse-checkout-cone-mode: true
      - name: Install dependencies and package
        working-directory: ${{ matrix.pkg.dir }}
        env:
          DEPS: ${{ toJson(matrix.pkg.deps) }}
        run: |
          # Install internal dependencies from local checkout first.
          # PIP_CONSTRAINT is cleared so the container's constraint file does
          # not pin these editable installs.
          for dep in $(echo "$DEPS" | jq -r '.[]'); do
            PIP_CONSTRAINT= pip install -e "../../$dep"
          done
          # Install the target sub-package (custom build script wins if present).
          if [ -f .ci_build.sh ]; then
            bash .ci_build.sh
          else
            PIP_CONSTRAINT= pip install -e .
          fi
          # Install test dependencies if declared. `|| true` keeps the step
          # green when a package declares no [test] extra (best-effort).
          PIP_CONSTRAINT= pip install pytest pytest-cov pytest-timeout || true
          PIP_CONSTRAINT= pip install -e ".[test]" 2>/dev/null || true
      - name: Run tests
        working-directory: ${{ matrix.pkg.dir }}
        run: |
          pytest -v \
            --cov=bionemo \
            --cov-report=xml:coverage.xml \
            --junitxml=results.junit.xml \
            -o junit_family=legacy \
            .
      - name: Upload coverage to Codecov
        if: github.event_name != 'merge_group' && github.event_name != 'schedule'
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: ${{ matrix.pkg.name }}
          # NOTE(review): coverage.xml is written under ${{ matrix.pkg.dir }},
          # but `uses:` steps do not honor working-directory; this relies on
          # the action's recursive report search -- confirm it finds the file.
          files: coverage.xml
      - name: Upload test results to Codecov
        # !cancelled() so results are uploaded even when the test step failed.
        if: ${{ !cancelled() && github.event_name != 'merge_group' && github.event_name != 'schedule' }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: ${{ matrix.pkg.name }}
          files: results.junit.xml
  # Slow tests (`pytest -m slow`): same per-package setup as run-tests, but
  # gated behind schedule / merge queue / explicit labels.
  run-tests-slow:
    needs:
      - pre-commit
      - changed-files
      - get-pr-labels
    runs-on: linux-amd64-gpu-l4-latest-1
    # First clause mirrors run-tests' gating (schedule / ciflow:all / changed
    # and not ciflow:skip). Second clause additionally requires schedule,
    # merge queue, or a ciflow:all / ciflow:slow label.
    if: |
      (
        (github.event_name == 'schedule') ||
        contains(fromJSON(needs.get-pr-labels.outputs.labels || '[]'), 'ciflow:all') ||
        (
          !contains(fromJSON(needs.get-pr-labels.outputs.labels || '[]'), 'ciflow:skip') &&
          (needs.changed-files.outputs.any_changed == 'true')
        )
      ) &&
      (
        (github.event_name == 'schedule') ||
        (github.event_name == 'merge_group') ||
        contains(fromJSON(needs.get-pr-labels.outputs.labels || '[]'), 'ciflow:all') ||
        contains(fromJSON(needs.get-pr-labels.outputs.labels || '[]'), 'ciflow:slow')
      )
    name: "slow-tests (${{ matrix.pkg.name }})"
    container:
      image: svcbionemo023/bionemo-framework:pytorch26.03-py3-squashed
      options: --shm-size=16G
    env:
      CI: true
      HF_TOKEN: ${{ secrets.HF_TOKEN }}
      HF_HOME: /cache/huggingface
      BIONEMO_DATA_SOURCE: ngc
    strategy:
      matrix:
        pkg: ${{ fromJson(needs.changed-files.outputs.matrix) }}
      fail-fast: false
    steps:
      - name: Show GPU info
        run: nvidia-smi
      - name: Setup proxy cache
        uses: nv-gha-runners/setup-proxy-cache@main
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            sub-packages
            LICENSE
          sparse-checkout-cone-mode: true
      - name: Install dependencies and package
        working-directory: ${{ matrix.pkg.dir }}
        env:
          DEPS: ${{ toJson(matrix.pkg.deps) }}
        run: |
          # Editable-install internal deps first, then the target package
          # (.ci_build.sh overrides the default install), then test extras
          # (best-effort; see run-tests for rationale on PIP_CONSTRAINT=).
          for dep in $(echo "$DEPS" | jq -r '.[]'); do
            PIP_CONSTRAINT= pip install -e "../../$dep"
          done
          if [ -f .ci_build.sh ]; then
            bash .ci_build.sh
          else
            PIP_CONSTRAINT= pip install -e .
          fi
          PIP_CONSTRAINT= pip install pytest pytest-cov pytest-timeout || true
          PIP_CONSTRAINT= pip install -e ".[test]" 2>/dev/null || true
      - name: Run slow tests
        working-directory: ${{ matrix.pkg.dir }}
        run: |
          pytest -v -m slow \
            --junitxml=results-slow.junit.xml \
            -o junit_family=legacy \
            . || test $? -eq 5 # exit 5 = no tests collected, which is OK
  # Notebook tests (nbval): same per-package setup, gated like run-tests-slow
  # but keyed on the ciflow:notebooks label.
  run-tests-notebooks:
    needs:
      - pre-commit
      - changed-files
      - get-pr-labels
    runs-on: linux-amd64-gpu-l4-latest-1
    # First clause mirrors run-tests' gating; second clause additionally
    # requires schedule, merge queue, or a ciflow:all / ciflow:notebooks label.
    if: |
      (
        (github.event_name == 'schedule') ||
        contains(fromJSON(needs.get-pr-labels.outputs.labels || '[]'), 'ciflow:all') ||
        (
          !contains(fromJSON(needs.get-pr-labels.outputs.labels || '[]'), 'ciflow:skip') &&
          (needs.changed-files.outputs.any_changed == 'true')
        )
      ) &&
      (
        (github.event_name == 'schedule') ||
        (github.event_name == 'merge_group') ||
        contains(fromJSON(needs.get-pr-labels.outputs.labels || '[]'), 'ciflow:all') ||
        contains(fromJSON(needs.get-pr-labels.outputs.labels || '[]'), 'ciflow:notebooks')
      )
    name: "notebook-tests (${{ matrix.pkg.name }})"
    container:
      image: svcbionemo023/bionemo-framework:pytorch26.03-py3-squashed
      options: --shm-size=16G
    env:
      CI: true
      HF_TOKEN: ${{ secrets.HF_TOKEN }}
      HF_HOME: /cache/huggingface
      BIONEMO_DATA_SOURCE: ngc
    strategy:
      matrix:
        pkg: ${{ fromJson(needs.changed-files.outputs.matrix) }}
      fail-fast: false
    steps:
      - name: Show GPU info
        run: nvidia-smi
      - name: Setup proxy cache
        uses: nv-gha-runners/setup-proxy-cache@main
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            sub-packages
            LICENSE
          sparse-checkout-cone-mode: true
      - name: Install dependencies and package
        working-directory: ${{ matrix.pkg.dir }}
        env:
          DEPS: ${{ toJson(matrix.pkg.deps) }}
        run: |
          # Editable-install internal deps, then the target package, then
          # notebook test tooling (nbval/testbook) -- best-effort installs.
          for dep in $(echo "$DEPS" | jq -r '.[]'); do
            PIP_CONSTRAINT= pip install -e "../../$dep"
          done
          if [ -f .ci_build.sh ]; then
            bash .ci_build.sh
          else
            PIP_CONSTRAINT= pip install -e .
          fi
          PIP_CONSTRAINT= pip install nbval testbook || true
          PIP_CONSTRAINT= pip install -e ".[test]" 2>/dev/null || true
      - name: Run notebook tests
        working-directory: ${{ matrix.pkg.dir }}
        run: |
          # -p no:python disables Python-file collection so only notebooks run.
          # FAST_CI_MODE=true is presumably read by the notebooks to shorten
          # long-running cells -- confirm against the notebooks themselves.
          FAST_CI_MODE=true pytest -v --nbval-lax -x -p no:python . \
            || test $? -eq 5 # exit 5 = no notebooks found, which is OK
| verify-tests-status: | |
| needs: | |
| - pre-commit | |
| - changed-files | |
| - get-pr-labels | |
| - run-tests | |
| - run-tests-slow | |
| - run-tests-notebooks | |
| runs-on: ubuntu-latest | |
| if: always() | |
| steps: | |
| - name: Check test job statuses | |
| run: | | |
| if [[ "${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') }}" == "true" ]]; then | |
| echo "Some test jobs have failed or been cancelled!" | |
| exit 1 | |
| else | |
| echo "All test jobs have completed successfully or been skipped!" | |
| exit 0 | |
| fi |