michelebasilico committed on
Commit • be00868
1 Parent(s): f4a906d
Update app.py
app.py CHANGED
@@ -1,5 +1,6 @@
 import gradio as gr
 from transformers import pipeline
+import requests
 
 # model="michelebasilico/itaca-mistral-7b-v2-4bit")
 model = pipeline("text-generation",
@@ -10,12 +11,29 @@ start_instruction_token, end_instruction_token = "[INST]", "[/INST]"
 system_prompt = "Sei un assistente utile ed affidabile. Rispondi in maniera adeguata alla domanda seguente: "
 
 
+API_URL = "https://cyk11dj2ce5ybyjq.us-east-1.aws.endpoints.huggingface.cloud"
+headers = {
+    "Accept": "application/json",
+    "Content-Type": "application/json"
+}
+
+
+def query(payload):
+    response = requests.post(API_URL, headers=headers, json=payload)
+    return response.json()
+
+
 def predict(message, history):
     new_message = start_instruction_token + system_prompt + \
         message + end_instruction_token + start_completion
-
-
-
+    output = query({
+        "inputs": new_message,
+        "parameters": {
+            "max_new_tokens": 256,
+            "return_full_text": False
+        }
+    })
+    return output
 
 
 iface = gr.ChatInterface(predict)
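
Note: as committed, predict hands the endpoint's raw JSON straight back to gr.ChatInterface. A minimal unpacking sketch, assuming the endpoint returns the standard text-generation payload of the form [{"generated_text": "..."}]; that response shape and the extract_text name are assumptions, not part of this commit:

def extract_text(output):
    # `output` is whatever query() returned; the list-of-dicts shape is assumed here.
    if isinstance(output, list) and output and "generated_text" in output[0]:
        return output[0]["generated_text"]
    # Fall back to a readable string if the endpoint replies with an error object.
    return str(output)

predict could then end with `return extract_text(output)` instead of `return output`, so the chat window shows the generated text rather than a JSON dump.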