import json
import os
import requests
import gradio as gr

# Environment variables for the backend URL, model name, and API key
BACKEND_URL = os.getenv('BACKEND_URL', '')
MODEL_NAME = os.getenv('MODEL_NAME')
API_KEY = os.getenv('API_KEY')

# Custom headers for the API request
HEADERS = {
    'orionstar-api-key': API_KEY,
    'Content-Type': 'application/json'
}


def clear_session():
    """Clears the chat session."""
    return '', None


def chat_stream_generator(url, payload):
    """Generator function to stream chat responses from the backend."""
    answer = ''
    with requests.post(url, json=payload, headers=HEADERS, stream=True) as response:
        # Fail fast on HTTP errors instead of trying to parse an error page as JSON.
        response.raise_for_status()
        if response.encoding is None:
            response.encoding = 'utf-8'
        for line in response.iter_lines(decode_unicode=True):
            if line:
                # Strip the server-sent-events "data: " prefix and skip the end-of-stream marker.
                line = line.replace('data: ', '')
                if line != '[DONE]':
                    data = json.loads(line)
                    if 'choices' in data and data['choices']:
                        choice = data['choices'][0]
                        if 'delta' in choice and choice['delta'].get('content'):
                            # Accumulate streamed tokens and yield the partial answer so far.
                            answer += choice['delta']['content']
                            yield answer
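
# Note: the parser above assumes an OpenAI-style streaming response, i.e. one
# JSON chunk per "data: " line, terminated by a "[DONE]" marker. An illustrative
# (not actual) stream from the backend might look like:
#
#   data: {"choices": [{"delta": {"content": "Hello"}}]}
#   data: {"choices": [{"delta": {"content": " world"}}]}
#   data: [DONE]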


def generate_chat(input_text: str, history=None):
    """Generates chat responses and updates the chat history."""
    if input_text is None:
        input_text = ''
    if history is None:
        history = []
    history = history[-5:]  # Keep only the last 5 exchanges in the history
    url = BACKEND_URL
    payload = {
        "model": MODEL_NAME,
        "stream": True,
        "messages": [
            {"role": "system", "content": "You are an AI assistant. Please answer my questions with professional knowledge."},
            {"role": "user", "content": input_text}
        ]
    }
    gen = chat_stream_generator(url, payload)
    response = ''
    for response in gen:
        # Show the partial answer while streaming, then drop it so the next
        # chunk replaces it instead of adding a duplicate turn.
        history.append((input_text, response))
        yield None, history
        history.pop()
    # Keep the final, complete answer in the history and emit the last update.
    history.append((input_text, response))
    yield None, history
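
# Note: Gradio runs generator callbacks as streaming handlers, so each `yield`
# from generate_chat pushes an incremental update to the textbox and chatbot
# outputs while the answer is still being produced.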

# Gradio interface
block = gr.Blocks()
with block as demo:
    gr.Markdown("<center><h1>OrionStar-Yi-34B-Chat Demo</h1></center>")
    gr.Markdown("""
* The Yi series models are large language models open-sourced by the 01.AI (零一万物) team, with strong results on many authoritative Chinese, English, and general-domain benchmarks.
* Orionstar-Yi-34B-Chat, released by [OrionStar](https://www.orionstar.com/), digs further into the potential of Yi-34B. Through deep training on a large corpus of high-quality fine-tuning data, we aim to make it an outstanding open-source alternative in the ChatGPT space.
* Orionstar-Yi-34B-Chat performs strongly on mainstream evaluation sets such as C-Eval, MMLU, and CMMLU, clearly leading other domestic open-source chat models (as of November 2023). See [here](https://github.com/OrionStarAI/OrionStar-Yi-34B-Chat) for a detailed comparison with other open-source models.
* Please support us by starring the project on [GitHub](https://github.com/OrionStarAI/OrionStar-Yi-34B-Chat).
* Join the [official WeChat group](https://cdn.jsdelivr.net/gh/OrionStarAI/OrionStar-Yi-34B-Chat@main/pics/wechat_group.jpg)""")
    chatbot = gr.Chatbot(label='OrionStar-Yi-34B-Chat', elem_classes="control-height")
    message = gr.Textbox(label='Input')
    with gr.Row():
        submit = gr.Button("🚀 Submit")
        clear_history = gr.Button("🧹 Clear History")
    submit.click(
        fn=generate_chat,
        inputs=[message, chatbot],
        outputs=[message, chatbot]
    )
    clear_history.click(
        fn=clear_session,
        inputs=[],
        outputs=[message, chatbot],
        queue=False
    )

demo.queue(api_open=False).launch(server_name='0.0.0.0', height=800, share=False, show_api=False)
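
# A minimal sketch of how this app might be launched locally; the URL, model
# name, and key below are placeholders, not values from this repository:
#
#   BACKEND_URL=https://<your-backend>/v1/chat/completions \
#   MODEL_NAME=OrionStar-Yi-34B-Chat \
#   API_KEY=<your-api-key> \
#   python app.py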