# JambaChatbot / app.py
import os

import gradio as gr
from ai21 import AI21Client
from ai21.models.chat import ChatMessage

# Read the AI21 API key from the environment (never hardcode secrets in source).
client = AI21Client(api_key=os.environ["AI21_API_KEY"])
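# The key must be set before the app starts, for example:
#   export AI21_API_KEY="your-key-here"
# or, on Hugging Face Spaces, by adding AI21_API_KEY as a repository secret
# (secrets are exposed to the app as environment variables).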
# Handle a single chat turn with the Jamba model.
def chatbot_response(user_input):
    # Wrap the input in ChatMessage objects with the expected roles.
    messages = [
        ChatMessage(role="system", content="You are a concise, fact-based question-answering assistant."),
        ChatMessage(role="user", content=user_input),
    ]
    response = client.chat.completions.create(
        messages=messages,
        model="jamba-1.5-large",
        # Optional tuning parameters, e.g.:
        # max_tokens=4096, temperature=0.4, top_p=1.0, stop=[], n=1, stream=False
    )
    return response.choices[0].message.content
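# Illustrative direct call (assumes a valid AI21_API_KEY is set and the
# jamba-1.5-large model is available on your account):
#   print(chatbot_response("What is the capital of France?"))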
# Create the Gradio interface
interface = gr.Interface(
    fn=chatbot_response,
    inputs="text",
    outputs="text",
    title="Jamba Chatbot",
    description="A simple chatbot using AI21 Labs' Jamba model.",
)
# Launch the Gradio app
if __name__ == "__main__":
    interface.launch()
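    # Note: interface.launch(share=True) would additionally expose a temporary
    # public URL; the plain launch() above serves the app locally or on the Space host.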