diff --git a/.github/workflows/test-check.yaml b/.github/workflows/test-check.yaml
index 65bcb815467..9f668546d99 100644
--- a/.github/workflows/test-check.yaml
+++ b/.github/workflows/test-check.yaml
@@ -140,9 +140,14 @@ jobs:
       - name: "🔬 Running onnx tests"
         run: make test TARGETS=onnx
   pytorch-tests:
-    runs-on: ubuntu-22.04
+    runs-on: aws-avx2-64G
     env:
       SPARSEZOO_TEST_MODE: "true"
+      CLEARML_WEB_HOST: ${{ secrets.CLEARML_WEB_HOST }}
+      CLEARML_API_HOST: ${{ secrets.CLEARML_API_HOST }}
+      CLEARML_API_ACCESS_KEY: ${{ secrets.CLEARML_API_ACCESS_KEY }}
+      CLEARML_FILES_HOST: ${{ secrets.CLEARML_FILES_HOST }}
+      CLEARML_API_SECRET_KEY: ${{ secrets.CLEARML_API_SECRET_KEY }}
     needs: test-setup
     if: ${{needs.test-setup.outputs.pytorch == 1}}
     steps:
@@ -164,9 +169,14 @@ jobs:
       - name: "🔬 Running pytorch tests"
         run: make test TARGETS=pytorch
   compat-pytorch-1_9-pytorch-tests:
-    runs-on: ubuntu-22.04
+    runs-on: aws-avx2-64G
     env:
       SPARSEZOO_TEST_MODE: "true"
+      CLEARML_WEB_HOST: ${{ secrets.CLEARML_WEB_HOST }}
+      CLEARML_API_HOST: ${{ secrets.CLEARML_API_HOST }}
+      CLEARML_API_ACCESS_KEY: ${{ secrets.CLEARML_API_ACCESS_KEY }}
+      CLEARML_FILES_HOST: ${{ secrets.CLEARML_FILES_HOST }}
+      CLEARML_API_SECRET_KEY: ${{ secrets.CLEARML_API_SECRET_KEY }}
     needs: test-setup
     if: ${{needs.test-setup.outputs.pytorch == 1}}
     steps:
@@ -212,9 +222,14 @@ jobs:
       - name: "🔬 Running onnx tests"
         run: make test TARGETS=onnx
   transformers-tests:
-    runs-on: ubuntu-22.04
+    runs-on: aws-avx2-64G
     env:
       SPARSEZOO_TEST_MODE: "true"
+      CLEARML_WEB_HOST: ${{ secrets.CLEARML_WEB_HOST }}
+      CLEARML_API_HOST: ${{ secrets.CLEARML_API_HOST }}
+      CLEARML_API_ACCESS_KEY: ${{ secrets.CLEARML_API_ACCESS_KEY }}
+      CLEARML_FILES_HOST: ${{ secrets.CLEARML_FILES_HOST }}
+      CLEARML_API_SECRET_KEY: ${{ secrets.CLEARML_API_SECRET_KEY }}
     needs: test-setup
     if: ${{needs.test-setup.outputs.transformers == 1}}
     steps:
diff --git a/tests/sparseml/transformers/finetune/test_finetune.py b/tests/sparseml/transformers/finetune/test_finetune.py
index b2a437751da..823ade908a4 100644
--- a/tests/sparseml/transformers/finetune/test_finetune.py
+++ b/tests/sparseml/transformers/finetune/test_finetune.py
@@ -141,7 +141,7 @@ def test_oneshot_then_finetune(tmp_path: Path):
     )
 
 
-def test_finetune_wout_recipe(tmp_path: Path):
+def test_finetune_without_recipe(tmp_path: Path):
     recipe_str = None
     model = "Xenova/llama2.c-stories15M"
     device = "cuda:0"
diff --git a/tests/sparseml/transformers/finetune/test_finetune_helpers.py b/tests/sparseml/transformers/finetune/test_finetune_helpers.py
index 262788c7af9..3fde66276d9 100644
--- a/tests/sparseml/transformers/finetune/test_finetune_helpers.py
+++ b/tests/sparseml/transformers/finetune/test_finetune_helpers.py
@@ -26,7 +26,7 @@ def test_apply_recipe_structure():
     model = AutoModelForCausalLM.from_pretrained(model_path)
     assert not qat_active(model)
 
-    recipe_with_quant = "tests/sparseml/transformers/obcq/test_tiny.yaml"
+    recipe_with_quant = "tests/sparseml/transformers/obcq/quant_and_sparse.yaml"
     apply_recipe_structure_to_model(model, recipe_with_quant, model_path)
     assert qat_active(model)
 
diff --git a/tests/sparseml/test_clear_ml.py b/tests/sparseml/transformers/test_clear_ml.py
similarity index 53%
rename from tests/sparseml/test_clear_ml.py
rename to tests/sparseml/transformers/test_clear_ml.py
index 987d15a15fe..31ff945b495 100644
--- a/tests/sparseml/test_clear_ml.py
+++ b/tests/sparseml/transformers/test_clear_ml.py
@@ -14,46 +14,29 @@
 from pathlib import Path
 
-from clearml import Task
-from sparseml.transformers import apply
-from sparseml.utils import is_package_available
-
-
-is_torch_available = is_package_available("torch")
-if is_torch_available:
-    import torch
+import torch
 
-    torch_err = None
-else:
-    torch = object
-    torch_err = ModuleNotFoundError(
-        "`torch` is not installed, use `pip install torch` to log to Weights and Biases"
-    )
+from clearml import Task
+from sparseml.transformers import train
 
 
-def test_oneshot_and_finetune(tmp_path: Path):
-    recipe_str = "tests/sparseml/transformers/finetune/test_alternate_recipe.yaml"
+def test_finetune_wout_recipe(tmp_path: Path):
+    recipe_str = None
     model = "Xenova/llama2.c-stories15M"
     device = "cuda:0"
-    if is_torch_available and not torch.cuda.is_available():
+    if not torch.cuda.is_available():
         device = "cpu"
-    dataset = "wikitext"
-    dataset_config_name = "wikitext-2-raw-v1"
-    concatenate_data = True
-    run_stages = True
+    dataset = "open_platypus"
+    concatenate_data = False
     output_dir = tmp_path
     max_steps = 50
-    splits = {"train": "train[:50%]", "calibration": "train[50%:60%]"}
+    splits = "train"
 
-    # clearML will automatically log default capturing entries without
-    # explicitly calling logger. Logs accessible in https://app.clear.ml/
     Task.init(project_name="test", task_name="test_oneshot_and_finetune")
 
-    apply(
+    train(
         model=model,
         dataset=dataset,
-        dataset_config_name=dataset_config_name,
-        run_stages=run_stages,
         output_dir=output_dir,
         recipe=recipe_str,
         max_steps=max_steps,