|
import gradio as gr |
|
from huggingface_hub import InferenceClient |
|
|
|
from datasets import load_dataset |
|
import pandas as pd |
|
|
|
# Module-level cache for the prompts DataFrame.
# Starts as None; populated by load_new_dataset() before the app serves queries.
df = None
|
|
|
|
|
def load_new_dataset():
    """Load the awesome-chatgpt-prompts dataset into the module-level ``df``.

    Reads the CSV straight from the Hugging Face Hub (``hf://`` path handled
    by pandas via huggingface_hub) and stores it in the global ``df`` that
    ``run_query`` searches. Shows a transient Gradio info toast while loading.
    """
    global df

    gr.Info(message="Loading dataset...")

    # read_csv already returns a DataFrame; the original wrapped it in
    # pd.DataFrame(...) again, which made a pointless full copy.
    df = pd.read_csv("hf://datasets/fka/awesome-chatgpt-prompts/prompts.csv")
|
|
|
def run_query(input: str):
    """Search the loaded prompts by actor name.

    Args:
        input: Case-insensitive substring matched against the 'act' column.

    Returns:
        Tuple of (matching rows as a DataFrame, status message string).

    Raises:
        gr.Error: if the dataset has not been loaded yet or the query fails.
    """
    # Guard: the Search button can be clicked before load_new_dataset() has
    # populated the global df (e.g. during startup). Fail with a clear message
    # instead of an AttributeError on None.
    if df is None:
        raise gr.Error("Dataset is not loaded yet — please try again in a moment.")

    try:
        # na=False keeps rows with a missing 'act' value out of the result
        # instead of propagating NaN into the boolean mask.
        df_results = df[df['act'].str.contains(input, case=False, na=False)]
    except Exception as e:
        raise gr.Error(f"Error running query: {e}")

    # Original always reported "found" even for an empty result set; report
    # the empty case honestly.
    if df_results.empty:
        logging_message = f"No results for '{input}'."
    else:
        logging_message = f"Results for '{input}' found."

    return df_results, logging_message
|
|
|
|
|
|
|
# UI layout: one textbox -> Search button -> results table + status label.
with gr.Blocks() as demo:
    # visible defaults to True for all components, so the explicit
    # visible=True flags the original passed were redundant; notably the
    # Label was given the STRING "True" (only worked because non-empty
    # strings are truthy) — use real booleans / defaults instead.
    text_input = gr.Textbox(label="Enter value to generate a prompt for an 'actor' (for instance, developer):")

    btn_run = gr.Button(value="Search")

    results_output = gr.Dataframe(label="Results", wrap=True)

    logging_output = gr.Label(value="")

    # Wire the button: run_query takes the textbox text and fills both the
    # results table and the status label.
    btn_run.click(
        fn=run_query,
        inputs=text_input,
        outputs=[results_output, logging_output],
    )
|
|
|
|
|
|
|
if __name__ == "__main__":

    # Load the dataset BEFORE launching so run_query has data to search;
    # demo.launch() blocks, so anything after it would not run.
    load_new_dataset()

    demo.launch()
|
|