From 9c7e1a72eedf7d9666a3bbf977f715b2d42da339 Mon Sep 17 00:00:00 2001
From: Phuc Nguyen
Date: Tue, 13 Feb 2024 12:21:11 +0000
Subject: [PATCH] add small run

---
 .github/workflows/small.yaml | 57 ++++++++++++++++++++++++++++++++++++
 1 file changed, 57 insertions(+)
 create mode 100644 .github/workflows/small.yaml

diff --git a/.github/workflows/small.yaml b/.github/workflows/small.yaml
new file mode 100644
index 00000000..cc9b6158
--- /dev/null
+++ b/.github/workflows/small.yaml
@@ -0,0 +1,57 @@
name: Run small tests

on:
  push:
    branches: [ main ]
    # Only run tests if we modify the following files
    paths:
      - "src/**/*.py"
      - "examples/**/*.py"
      - "tests/**/*.py"

  pull_request:
    branches: [ '**' ]
    paths:
      - "src/**/*.py"
      - "examples/**/*.py"
      - "tests/**/*.py"

jobs:
  tests:
    # Labels selecting a self-hosted multi-GPU runner with NVIDIA T4s
    runs-on: [multi-gpu, nvidia-gpu, 8-t4, ci]
    container:
      image: runpod/pytorch:2.1.1-py3.10-cuda12.1.1-devel-ubuntu22.04
      ports:
        - 80
      # Expose all host GPUs to the container and raise shared memory for PyTorch
      options: --gpus all --shm-size "8G"
    steps:
    - uses: actions/checkout@v3
    - name: Python environment
      run: |
        which python
        python --version

    - name: Check PyTorch version
      run: |
        nvidia-smi
        python -c "import torch; print('torch:', torch.__version__, torch)"
        python -c "import torch; print('CUDA available:', torch.cuda.is_available())"

    - name: Install nanotron
      run: |
        python -m pip install --upgrade pip
        pip install packaging
        pip install wheel
        git clone https://github.com/huggingface/nanotron.git
        cd nanotron
        pip install -e .
        pip install -e .[dev]
        pip install pytest==7.4.0 pluggy==1.0.0

    - name: Show installed libraries and their versions
      run: pip freeze | tee installed.txt

    - name: Run tests
      # NOTE: -m "not fa2" runs all the unit tests that don't have the "fa2"
      # mark (these are FA2-related tests, which can't run on T4s)
      run: pytest -m "not fa2" --color=yes --durations=0 --verbose tests/test_clip_grads.py
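
For reference, a minimal sketch of how the "fa2" marker deselection used in the "Run tests" step behaves; the file name, test names, and the flash_attn import below are illustrative, not taken from nanotron's test suite.

    # test_example_markers.py -- illustrative only, not part of nanotron.
    # Custom marks such as "fa2" would normally be registered under the
    # `markers` option in pytest.ini / pyproject.toml to silence
    # unknown-mark warnings.
    import pytest
    import torch


    @pytest.mark.fa2
    def test_fa2_attention():
        # Deselected by `-m "not fa2"`; FlashAttention-2 kernels need Ampere
        # or newer GPUs, which the T4 runner does not have.
        pytest.importorskip("flash_attn")


    def test_unmarked_tests_always_run():
        # Unmarked tests are always selected, including on the T4 runner.
        p = torch.nn.Parameter(torch.ones(4))
        p.grad = torch.full((4,), 2.0)
        total_norm = torch.nn.utils.clip_grad_norm_([p], max_norm=1.0)
        assert total_norm > 0

Running `pytest -m "not fa2"` against such a file selects only the unmarked test, which is the behavior the workflow comment describes for the T4 runner.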