# Oroz / app.py
# Author: zcodel
# Last change: "Update app.py" (commit c827f23, verified)
import gradio as gr
import pandas as pd
from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
# Load the language model and tokenizer once at startup and wrap them in a
# text-generation pipeline. NOTE: despite the original comment, this is
# GPT-Neo 2.7B (EleutherAI), not GPT-4 — GPT-4 is not an open checkpoint.
# Downloading/loading this model takes several GB of disk and RAM.
model_name = "EleutherAI/gpt-neo-2.7B" # any causal-LM checkpoint id works here
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
def generate_solutions(query):
    """Generate candidate maintenance solutions for a problem description.

    Runs the text-generation pipeline on *query*, wraps the three sampled
    completions in a one-column-per-field table, and returns it as an HTML
    string suitable for a ``gr.HTML`` output.

    Args:
        query: Free-text description of the machine problem.

    Returns:
        An HTML ``<table>`` string with "Solution" and "Link" columns.
    """
    # Guard against empty/whitespace-only input instead of sampling from
    # an empty prompt.
    if not query or not query.strip():
        return "<p>Please describe the problem with the machine.</p>"
    # do_sample=True is required: with the default greedy decoding,
    # num_return_sequences > 1 raises a ValueError in transformers.
    responses = generator(
        query,
        max_length=100,
        num_return_sequences=3,
        do_sample=True,
    )
    # One row per generated completion. The "Link" value is a placeholder —
    # no real web search is performed here.
    solutions = [
        {"Solution": response["generated_text"].strip(), "Link": "https://example.com"}
        for response in responses
    ]
    df = pd.DataFrame(solutions)
    # escape=False + render_links=True makes the URLs clickable in the HTML.
    return df.to_html(escape=False, index=False, render_links=True)
# Wire the generator function into a simple Gradio UI: one free-text input,
# one HTML output for the rendered solutions table.
problem_box = gr.Textbox(
    lines=2,
    placeholder="Describe the problem with the machine...",
)
solutions_view = gr.HTML()
iface = gr.Interface(
    fn=generate_solutions,
    inputs=problem_box,
    outputs=solutions_view,
    title="Oroz: Your Industry Maintenance Assistant",
    description="Describe the problem with your machine, and get an organized table of suggested solutions with web links.",
)
# share=True exposes a public tunnel URL in addition to the local server.
iface.launch(share=True)