Sergidev committed on
Commit
b1f117d
1 Parent(s): f3be2ab

Cuda fix 2


Try to fix CUDA.

Files changed (1)
  1. start.py +2 -6
start.py CHANGED
@@ -1,15 +1,11 @@
 import subprocess
-import os
 
 # Uninstall the existing llama-cpp-python package
 subprocess.run("pip uninstall -y llama-cpp-python", shell=True)
 
-# Set the required environment variables for installing with CUDA support
-os.environ["CMAKE_ARGS"] = "-DLLAMA_CUBLAS=on"
-os.environ["FORCE_CMAKE"] = "1"
-
 # Install llama-cpp-python with CUDA support
-subprocess.run("pip install llama-cpp-python", shell=True)
+install_command = "CMAKE_ARGS='-DLLAMA_CUBLAS=on' pip install llama-cpp-python"
+subprocess.run(install_command, shell=True)
 
 # Start the Hugging Face Space
 subprocess.run("uvicorn app:app --host 0.0.0.0 --port 7860", shell=True)