turbine models ci
dan-garvey committed Nov 21, 2023
1 parent 372a1c1 commit 6d9c02d
Showing 5 changed files with 63 additions and 49 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
@@ -38,7 +38,7 @@ jobs:
      - name: Run tests
        run: |
-         pytest
+         pytest tests/

  black:
    strategy:
42 changes: 42 additions & 0 deletions .github/workflows/test_models.yml
@@ -0,0 +1,42 @@
name: Test

on:
  workflow_dispatch:
  pull_request:
  push:
    branches:
      - main

jobs:
  test:
    strategy:
      matrix:
        version: [3.11]
        os: [nodai-ubuntu-builder-large]

    runs-on: ${{matrix.os}}
    steps:
      - name: "Setting up Python"
        uses: actions/setup-python@75f3110429a8c05be0e1bf360334e4cced2b63fa # v2.3.3
        with:
          python-version: ${{matrix.version}}

      - name: "Checkout Code"
        uses: actions/checkout@v2

      - name: Sync source deps
        run: |
          python -m pip install --upgrade pip
          # Note: We install in three steps in order to satisfy requirements
          # from non default locations first. Installing the PyTorch CPU
          # wheels saves multiple minutes and a lot of bandwidth on runner setup.
          pip install --index-url https://download.pytorch.org/whl/cpu \
            -r pytorch-cpu-requirements.txt \
            -r torchvision-requirements.txt
          pip install --upgrade -r requirements.txt
          pip install -e .[testing]
          pip install -r turbine-models-requirements.txt
      - name: Run tests
        run: |
          pytest python/turbine_models/tests
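
The install-and-test sequence above can also be mirrored outside of CI. The sketch below is not part of this commit; it assumes it is run from the repository root with the same requirements files present, and simply drives the workflow's commands through Python's subprocess module.

# Local-reproduction sketch (assumption: run from the repository root with the
# requirements files referenced by the workflow present). Not part of this commit.
import subprocess
import sys


def run(args):
    # Echo each command and stop on the first failure, like a CI step would.
    print("+", " ".join(args))
    subprocess.run(args, check=True)


run([sys.executable, "-m", "pip", "install", "--upgrade", "pip"])
run([sys.executable, "-m", "pip", "install",
     "--index-url", "https://download.pytorch.org/whl/cpu",
     "-r", "pytorch-cpu-requirements.txt",
     "-r", "torchvision-requirements.txt"])
run([sys.executable, "-m", "pip", "install", "--upgrade", "-r", "requirements.txt"])
run([sys.executable, "-m", "pip", "install", "-e", ".[testing]"])
run([sys.executable, "-m", "pip", "install", "-r", "turbine-models-requirements.txt"])
run([sys.executable, "-m", "pytest", "python/turbine_models/tests"])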
20 changes: 20 additions & 0 deletions python/turbine_models/tests/llama_test.py
@@ -0,0 +1,20 @@
# Copyright 2023 Nod Labs, Inc
#
# Licensed under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

import logging
import turbine_models.custom_models.stateless_llama as llama
import unittest
import os

class LLamaTest(unittest.TestCase):
    def testExportTransformerModel(self):
        llama.export_transformer_model("meta-llama/Llama-2-7b-chat-hf", "meta-llama/Llama-2-7b-chat-hf", "torch", "safetensors", "llama_f32.safetensors", None, "f32")
        os.remove("llama_f32.safetensors")


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    unittest.main()
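
One possible refinement, sketched below and not part of this commit: os.remove only runs if the export succeeds, so registering the cleanup with unittest's addCleanup keeps a failed export from leaving llama_f32.safetensors behind. The export call itself is copied verbatim from the test above; everything else is an assumption.

# Sketch only (not part of this commit): register the cleanup before exporting
# so the generated weights file is removed even when the export fails.
import contextlib
import logging
import os
import unittest

import turbine_models.custom_models.stateless_llama as llama


class LLamaTest(unittest.TestCase):
    def testExportTransformerModel(self):
        self.addCleanup(self._remove_file, "llama_f32.safetensors")
        llama.export_transformer_model("meta-llama/Llama-2-7b-chat-hf", "meta-llama/Llama-2-7b-chat-hf", "torch", "safetensors", "llama_f32.safetensors", None, "f32")

    @staticmethod
    def _remove_file(path):
        # Tolerate a missing file if the export never produced one.
        with contextlib.suppress(FileNotFoundError):
            os.remove(path)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    unittest.main()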
17 changes: 0 additions & 17 deletions tests/models/hf_dict.py

This file was deleted.

31 changes: 0 additions & 31 deletions tests/models/transformer_builder_tests.py

This file was deleted.
