|
import html

import gradio as gr
import pandas as pd
from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
|
|
|
|
|
# Hugging Face model id of the causal LM that backs the assistant.
model_name = "EleutherAI/gpt-neo-2.7B"

# Load the tokenizer and weights once at import time. NOTE(review): the
# 2.7B checkpoint is several GB, so the first run downloads and loads it
# before the UI comes up.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Shared text-generation pipeline used by generate_solutions() below.
generator = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
)
|
|
|
def generate_solutions(query):
    """Generate candidate maintenance solutions for *query* as an HTML table.

    Runs the module-level ``generator`` pipeline to produce three candidate
    completions, then renders them with pandas as a two-column HTML table
    (Solution text plus a link) suitable for a ``gr.HTML`` output.

    Parameters:
        query: Free-text problem description typed by the user.

    Returns:
        str: An HTML ``<table>`` fragment.
    """
    # do_sample=True is required: transformers raises a ValueError when
    # greedy decoding (the default) is combined with num_return_sequences > 1,
    # and sampling is what makes the three candidates differ at all.
    # truncation=True keeps over-long prompts from exceeding max_length.
    responses = generator(
        query,
        max_length=100,
        num_return_sequences=3,
        do_sample=True,
        truncation=True,
    )

    # html.escape the model output: the table is rendered with escape=False
    # below (needed for render_links), so unescaped generated text would be
    # injected straight into the page as live HTML.
    solutions = [
        {
            "Solution": html.escape(response["generated_text"].strip()),
            # Placeholder link — no real source URL is produced yet.
            "Link": "https://example.com",
        }
        for response in responses
    ]

    df = pd.DataFrame(solutions)

    # escape=False + render_links=True lets pandas turn the Link column
    # into clickable <a> tags while the Solution text is pre-escaped above.
    table_html = df.to_html(escape=False, index=False, render_links=True)

    return table_html
|
|
|
|
|
# Wire the generator function into a simple web UI: a free-text problem
# description goes in, a rendered HTML table of suggestions comes out.
problem_box = gr.Textbox(
    lines=2,
    placeholder="Describe the problem with the machine...",
)

iface = gr.Interface(
    fn=generate_solutions,
    inputs=problem_box,
    outputs=gr.HTML(),
    title="Oroz: Your Industry Maintenance Assistant",
    description="Describe the problem with your machine, and get an organized table of suggested solutions with web links.",
)

# share=True additionally exposes a temporary public URL beside the local server.
iface.launch(share=True)
|
|
|
|