import gradio as gr
import requests

# Ollama's default local endpoint for single-shot (non-chat) generation.
OLLAMA_ENDPOINT = "http://localhost:11434/api/generate"
# Model must already be pulled locally (`ollama pull llama3`).
DEFAULT_MODEL = "llama3"


def chat_with_ollama(message, model=DEFAULT_MODEL):
    """Send *message* to a local Ollama server and return its reply text.

    Parameters
    ----------
    message : str
        The user prompt forwarded to the model.
    model : str
        Name of the Ollama model to query. Defaults to ``DEFAULT_MODEL``;
        keeping a default preserves the original one-argument call shape
        used by the Gradio interface below.

    Returns
    -------
    str
        The model's generated text, or a human-readable error description
        (errors are returned, not raised, so the Gradio UI shows them).
    """
    try:
        # /api/generate requires "model" and "prompt" keys; the previous
        # payload sent "message", which the API does not accept.
        # "stream": False asks for one JSON object instead of the default
        # newline-delimited streaming chunks, so .json() can parse it.
        payload = {"model": model, "prompt": message, "stream": False}
        # timeout prevents a hung server from blocking the worker forever;
        # generation can be slow, hence a generous value.
        response = requests.post(OLLAMA_ENDPOINT, json=payload, timeout=120)
        if response.status_code == 200:
            # Parse JSON only after confirming success — error bodies are
            # not guaranteed to be valid JSON.
            return response.json().get("response", "No response from Ollama.")
        try:
            error_detail = response.json().get("error", "Unknown error")
        except ValueError:
            # Non-JSON error body (e.g. proxy HTML page); fall back to text.
            error_detail = response.text or "Unknown error"
        return f"Error: {error_detail}"
    except Exception as e:
        # Connection refused, timeout, etc. — surface to the UI rather than
        # crashing the Gradio worker.
        return f"An error occurred: {str(e)}"


# Gradio interface: a simple single-textbox front end over the function above.
iface = gr.Interface(
    fn=chat_with_ollama,
    inputs=gr.Textbox(lines=2, placeholder="Type your message..."),
    outputs="text",
    title="Gradio Chatbot with Ollama Backend",
)

if __name__ == "__main__":
    # Bind to all interfaces so the app is reachable from other machines
    # (e.g. when running inside a container).
    iface.launch(server_name="0.0.0.0", server_port=7860)