Update DESCRIPTION in app.py
app.py CHANGED
@@ -14,6 +14,10 @@ zero = torch.Tensor([0]).cuda()
 print(zero.device) # <-- 'cpu' 🤔
 
 DESCRIPTION = '''
+<div>
+  <h1 style="text-align: center;">TAIDE/Llama3-TAIDE-LX-8B-Chat-Alpha1</h1>
+  <p>This Space demonstrates the instruction-tuned model <a href="https://huggingface.co/taide/Llama3-TAIDE-LX-8B-Chat-Alpha1"><b>Llama3-TAIDE-LX-8B-Chat-Alpha1</b></a>. Llama3-TAIDE-LX-8B is a new open LLM and comes in one size: 8B. Feel free to play with it, or duplicate it to run privately!</p>
+</div>
 '''
 
 LICENSE = """
@@ -43,7 +47,7 @@ terminators = [
     tokenizer.convert_tokens_to_ids("<|eot_id|>")
 ]
 
-@spaces.GPU
+@spaces.GPU
 def chat_taide_8b(message: str,
                   history: list,
                   temperature: float,
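For context on the second hunk: on a ZeroGPU Space, a GPU is only attached while a function decorated with @spaces.GPU is running, which is why the module-level print(zero.device) in app.py reports 'cpu'. The sketch below only illustrates that pattern; the trimmed chat_taide_8b body and its return value are placeholders, not the Space's actual implementation.

import spaces
import torch

# At import time on ZeroGPU no GPU is attached yet, so the tensor reports 'cpu'.
zero = torch.Tensor([0]).cuda()
print(zero.device)  # <-- 'cpu' 🤔

@spaces.GPU  # a GPU is attached for the duration of each call
def chat_taide_8b(message: str, history: list, temperature: float):
    # Inside the decorated function the same tensor lives on the GPU.
    print(zero.device)  # <-- 'cuda:0'
    return message  # placeholder; the real function generates a model response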