Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -5,12 +5,7 @@ from PIL import Image
 from transformers import MllamaForConditionalGeneration, AutoProcessor
 
 from huggingface_hub import login
-login("token_spaces")
-
-# ckpt = "meta-llama/Llama-3.2-11B-Vision-Instruct"
-# model = MllamaForConditionalGeneration.from_pretrained(ckpt,
-#                                                        torch_dtype=torch.bfloat16).to("cuda")
-# processor = AutoProcessor.from_pretrained(ckpt)
+login("token_spaces", add_to_git_credential=True)
 
 # Load the Llama 3.2 Vision Model
 def load_llama_model():
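For context, a minimal sketch of what load_llama_model() presumably does, reusing the checkpoint name and the bfloat16/CUDA settings from the commented-out lines removed in this commit; the function body itself is an assumption and is not part of the diff.

import torch
from transformers import MllamaForConditionalGeneration, AutoProcessor

# Sketch only: checkpoint and dtype are taken from the removed comments above;
# the exact function body in app.py may differ.
def load_llama_model():
    ckpt = "meta-llama/Llama-3.2-11B-Vision-Instruct"
    model = MllamaForConditionalGeneration.from_pretrained(
        ckpt, torch_dtype=torch.bfloat16
    ).to("cuda")
    processor = AutoProcessor.from_pretrained(ckpt)
    return model, processor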