import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from peft import PeftModel, PeftConfig
# Load the fine-tuned model and tokenizer
model_name = "sagar007/phi-1_5-finetuned" # Updated model path
config = PeftConfig.from_pretrained(model_name)
# Check if CUDA is available
if torch.cuda.is_available():
    device_map = "auto"
    torch_dtype = torch.float16
else:
    device_map = "cpu"
    torch_dtype = torch.float32
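# Load the base model in the selected dtype/device, then attach the fine-tuned adapter weights on top of it.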
model = AutoModelForCausalLM.from_pretrained(
    config.base_model_name_or_path,
    torch_dtype=torch_dtype,
    device_map=device_map
)
model = PeftModel.from_pretrained(model, model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
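# The base tokenizer may not define a pad token (assumption: the adapter repo does not add one);
# falling back to the EOS token avoids padding errors during generation.
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token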
# Create a text generation pipeline
generator = pipeline("text-generation", model=model, tokenizer=tokenizer, device_map=device_map)
def generate_text(prompt, max_length=100, temperature=0.7, top_p=0.9):
"""Generate text based on the input prompt."""
if not prompt.strip():
return "Please enter a prompt before generating text."
try:
generated = generator(prompt, max_length=max_length, do_sample=True, temperature=temperature, top_p=top_p)
return generated[0]['generated_text']
except Exception as e:
return f"An error occurred: {str(e)}"
# Custom CSS for styling (unchanged)
custom_css = """
... (your existing CSS)
"""
# Create the Gradio interface
with gr.Blocks(css=custom_css) as iface:
gr.HTML("
")
gr.HTML("
🤖 Phi-1.5 Fine-tuned Text Generator
")
    with gr.Row():
        with gr.Column():
            input_text = gr.Textbox(lines=5, label="Enter your prompt")
            max_length = gr.Slider(minimum=50, maximum=500, value=100, step=10, label="Max Length")
            temperature = gr.Slider(minimum=0.1, maximum=1.0, value=0.7, step=0.1, label="Temperature")
            top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.9, step=0.1, label="Top P")
            generate_button = gr.Button("Generate Text", variant="primary")
        with gr.Column():
            output_text = gr.Textbox(lines=10, label="Generated Text")
    # Add example prompts
    gr.Examples(
        examples=[
            "Explain the concept of machine learning.",
            "Write a short story about a robot learning to paint.",
            "What are some effective ways to reduce stress?",
            "Summarize the key points of climate change in simple terms.",
            "Create a step-by-step guide for making a perfect omelette.",
            "Describe the differences between classical and quantum computing.",
            "Write a motivational speech for a team starting a new project.",
            "Explain the importance of biodiversity in ecosystems.",
            "Compose a haiku about artificial intelligence.",
            "List five tips for effective time management.",
            "Describe the process of photosynthesis in layman's terms.",
            "Write a dialogue between two characters discussing the future of space exploration.",
            "Explain the concept of blockchain technology and its potential applications."
        ],
        inputs=input_text
    )
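    # Wire the button to the generation function; the inputs list must match the
    # generate_text parameter order (prompt, max_length, temperature, top_p).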
    generate_button.click(
        generate_text,
        inputs=[input_text, max_length, temperature, top_p],
        outputs=output_text
    )
gr.HTML("")
gr.HTML("
This model is a fine-tuned version of Phi-1.5, trained on the OpenAssistant dataset.
")
gr.HTML("
")
# Launch the app
iface.launch()
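# Note: on Hugging Face Spaces the app is served automatically; when running locally,
# iface.launch(share=True) can expose a temporary public URL instead.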