# Setup script: reinstall llama-cpp-python with CUDA support, then launch the app.
import subprocess
import os
import sys

# Reinstall llama-cpp-python built with CUDA (cuBLAS) support, then start the
# Hugging Face Space's web server.

# Remove any pre-built (CPU-only) package first. check=False: tolerate the
# "not installed" case, matching the original script's best-effort behavior.
subprocess.run(
    [sys.executable, "-m", "pip", "uninstall", "-y", "llama-cpp-python"],
    check=False,
)

# CMake flags read by llama-cpp-python's build backend to enable cuBLAS.
os.environ["CMAKE_ARGS"] = "-DLLAMA_CUBLAS=on"
os.environ["FORCE_CMAKE"] = "1"

# --no-cache-dir forces a fresh source build so CMAKE_ARGS actually takes
# effect; a cached wheel would silently skip the CUDA compile.
# check=True: abort instead of starting the server on a failed install.
subprocess.run(
    [sys.executable, "-m", "pip", "install", "--no-cache-dir", "llama-cpp-python"],
    check=True,
)

# Start the Space's ASGI server (blocks until the server process exits).
subprocess.run(
    ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"],
    check=True,
)