
Commit

update tests + add platform before docker run
ejm714 committed Oct 29, 2024
1 parent 7e97ea1 commit 2280362
Showing 5 changed files with 58 additions and 94 deletions.
6 changes: 5 additions & 1 deletion Makefile
@@ -9,6 +9,7 @@ CPU_OR_GPU ?= gpu
endif

BLOCK_INTERNET ?= true
PLATFORM_ARGS = --platform linux/amd64

TAG := ${CPU_OR_GPU}-latest
LOCAL_TAG := ${CPU_OR_GPU}-local
@@ -127,7 +128,7 @@ update-lockfile: runtime/pixi.lock
--tag pixi-lock:local
@echo Running lock container
docker run \
--platform linux/amd64 \
${PLATFORM_ARGS} \
--mount type=bind,source="$(shell pwd)"/runtime/pixi.toml,target=/tmp/pixi.toml \
--mount type=bind,source="$(shell pwd)"/runtime/pixi.lock,target=/tmp/pixi.lock \
--rm \
@@ -136,6 +137,7 @@ update-lockfile: runtime/pixi.lock
## Ensures that your locally built image can import all the Python packages successfully when it runs
test-container: _check_image _echo_image _submission_write_perms
docker run \
${PLATFORM_ARGS} \
${GPU_ARGS} \
${NETWORK_ARGS} \
${TTY_ARGS} \
@@ -147,6 +149,7 @@ test-container: _check_image _echo_image _submission_write_perms
## Open an interactive bash shell within the running container (with network access)
interact-container: _check_image _echo_image _submission_write_perms
docker run \
${PLATFORM_ARGS} \
${GPU_ARGS} \
${NETWORK_ARGS} \
--mount type=bind,source=${shell pwd}/data,target=/code_execution/data,readonly \
@@ -188,6 +191,7 @@ ifeq (,$(wildcard ./submission/submission.zip))
If you want to use the benchmark, you can run `make pack-example <benchmark name>` first)
endif
docker run \
${PLATFORM_ARGS} \
${TTY_ARGS} \
${GPU_ARGS} \
${NETWORK_ARGS} \
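For context, here is roughly what one of these recipes expands to once make substitutes PLATFORM_ARGS; only the --platform flag and the data mount come from this diff, while the GPU flag, network setting, and image name are illustrative assumptions:

    # Illustrative expansion only, not a literal recipe from this Makefile.
    # --platform linux/amd64 comes from PLATFORM_ARGS in this diff; the GPU
    # flag, network setting, and image name are assumed for the example.
    docker run \
        --platform linux/amd64 \
        --gpus all \
        --network none \
        --mount type=bind,source="$(pwd)"/data,target=/code_execution/data,readonly \
        --rm \
        some-runtime-image:gpu-latest
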
49 changes: 49 additions & 0 deletions runtime/pixi.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions runtime/pixi.toml
@@ -5,6 +5,7 @@ platforms = ["linux-64"]

# conda package dependencies
[dependencies]
accelerate = "1.0.1"
einops = "0.8.0"
langchain = "0.3.*"
librosa = ">=0.10.2.post1,<0.11"
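Since runtime/pixi.toml gained a new pinned dependency, runtime/pixi.lock has to be regenerated to match, which this commit does (see the 49-line lockfile change above). A minimal sketch of that workflow, using the update-lockfile target shown in the Makefile diff:

    # Rebuild runtime/pixi.lock after editing runtime/pixi.toml
    # (runs the lock container via the update-lockfile target above).
    make update-lockfile
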
46 changes: 0 additions & 46 deletions runtime/tests/test_lockfile.py

This file was deleted.

50 changes: 3 additions & 47 deletions runtime/tests/test_packages.py
@@ -4,16 +4,15 @@
import pytest

packages = [
"gensim",
"keras",
"numpy",
"pandas",
"scipy",
"spacy",
"sklearn",
"tensorflow",
"torch",
"torchaudio",
"transformers",
"whisper",
"speechbrain",
]


@@ -41,46 +40,3 @@ def test_allocate_torch():
assert torch.cuda.is_available()

torch.zeros(1).cuda()


@pytest.mark.skipif(not GPU_AVAILABLE, reason="No GPU available")
def test_allocate_tf():
import tensorflow as tf

assert tf.test.is_built_with_cuda()
assert (devices := tf.config.list_logical_devices("GPU"))

for device in devices:
with tf.device(device.name):
tf.constant([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])


@pytest.mark.skipif(not GPU_AVAILABLE, reason="No GPU available")
def test_allocate_cupy():
import cupy as cp

cp.array([1, 2, 3, 4, 5, 6])


def test_spacy():
import spacy
from spacy.tokens import DocBin

if GPU_AVAILABLE:
spacy.require_gpu()

nlp = spacy.blank("en")
training_data = [
("Tokyo Tower is 333m tall.", [(0, 11, "BUILDING")]),
]

# the DocBin will store the example documents
db = DocBin()
for text, annotations in training_data:
doc = nlp(text)
ents = []
for start, end, label in annotations:
span = doc.char_span(start, end, label=label)
ents.append(span)
doc.ents = ents
db.add(doc)

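With the TensorFlow, CuPy, and spaCy checks gone, the trimmed suite can still be exercised end to end through the Makefile. A minimal usage sketch, assuming the runtime image has already been built or pulled:

    # Run the package-import smoke tests inside the container
    # (test-container is the target shown in the Makefile diff above).
    make test-container

    # Same tests against the CPU image; CPU_OR_GPU is the variable
    # that defaults to gpu near the top of the Makefile.
    make test-container CPU_OR_GPU=cpu
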
