Spaces:
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
@@ -1,7 +1,7 @@
|
|
1 |
import torch
|
2 |
from PIL import Image
|
3 |
import gradio as gr
|
4 |
-
import spaces
|
5 |
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, TextIteratorStreamer
|
6 |
import os
|
7 |
from threading import Thread
|
@@ -58,7 +58,7 @@ model = AutoModelForCausalLM.from_pretrained(
|
|
58 |
)
|
59 |
tokenizer = AutoTokenizer.from_pretrained(MODELS)
|
60 |
|
61 |
-
|
62 |
def stream_chat(message: str, history: list, temperature: float, max_new_tokens: int):
|
63 |
print(f'message is - {message}')
|
64 |
print(f'history is - {history}')
|
|
|
1 |
import torch
|
2 |
from PIL import Image
|
3 |
import gradio as gr
|
4 |
+
#import spaces
|
5 |
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, TextIteratorStreamer
|
6 |
import os
|
7 |
from threading import Thread
|
|
|
58 |
)
|
59 |
tokenizer = AutoTokenizer.from_pretrained(MODELS)
|
60 |
|
61 |
+
#@spaces.GPU()
|
62 |
def stream_chat(message: str, history: list, temperature: float, max_new_tokens: int):
|
63 |
print(f'message is - {message}')
|
64 |
print(f'history is - {history}')
|