Skip to content

Commit

Permalink
Implement NO_KERNELS flag and update torch requirement (#582)
Browse files — browse the repository at this point in the history
Co-authored-by: devin-ai-integration[bot] <158243242+devin-ai-integration[bot]@users.noreply.github.com>
  • Loading branch information
devin-ai-integration[bot] authored Aug 28, 2024
1 parent 79258d6 commit 8d903b2
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ def get_kernels_whl_url(

AUTOAWQ_VERSION = "0.2.6"
PYPI_BUILD = os.getenv("PYPI_BUILD", "0") == "1"
NO_KERNELS = int(os.getenv("NO_KERNELS", "0"))
IS_CPU_ONLY = not torch.backends.mps.is_available() and not torch.cuda.is_available()

CUDA_VERSION = os.getenv("CUDA_VERSION", None) or torch.version.cuda
Expand Down Expand Up @@ -86,7 +87,7 @@ def get_kernels_whl_url(
}

requirements = [
"torch==2.3.1",
"torch>=2.3.1",
"transformers>=4.35.0",
"tokenizers>=0.12.1",
"typing_extensions>=4.8.0",
Expand All @@ -107,7 +108,7 @@ def get_kernels_whl_url(

# kernels can be downloaded from pypi for cuda+121 only
# for everything else, we need to download the wheels from github
if not KERNELS_INSTALLED and (CUDA_VERSION or ROCM_VERSION):
if not KERNELS_INSTALLED and (CUDA_VERSION or ROCM_VERSION) and not NO_KERNELS:
if CUDA_VERSION and CUDA_VERSION.startswith("12"):
requirements.append("autoawq-kernels")
elif CUDA_VERSION and CUDA_VERSION.startswith("11") or ROCM_VERSION in ["571"]:
Expand Down

0 comments on commit 8d903b2

Please sign in to comment.