# -*- coding: utf-8 -*-
"""gen ai project f.ipynb
Automatically generated by Colab.
Original file is located at
https://colab.research.google.com/drive/1iF7hdOjWNeFUtGvUYdaFsBErJGnY1h5J
"""
# Install necessary packages
!pip install transformers torch diffusers streamlit gradio huggingface_hub
!pip install pyngrok # For exposing the app to the public
!pip install sacremoses
!pip install sentencepiece
from huggingface_hub import login
# NOTE: "hf_gen" is a placeholder; replace it with a valid Hugging Face access token
login(token="hf_gen")
!pip install requests
!pip install Pillow
# Import necessary libraries
from transformers import MarianMTModel, MarianTokenizer, pipeline
# Load the translation model and tokenizer
model_name = "Helsinki-NLP/opus-mt-mul-en"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)
# Create a translation pipeline
translator = pipeline("translation", model=model, tokenizer=tokenizer)
# Function for translation
def translate_text(tamil_text):
    try:
        # Perform translation
        translation = translator(tamil_text, max_length=40)
        translated_text = translation[0]['translation_text']
        return translated_text
    except Exception as e:
        return f"An error occurred: {str(e)}"
# Test translation with example Tamil text
tamil_text = "மழையுடன் ஒரு பூ" # "A flower with rain"
translated_text = translate_text(tamil_text)
print(f"Translated Text: {translated_text}")
import requests
import io
from PIL import Image
import matplotlib.pyplot as plt
# API credentials and endpoint
API_URL = "https://api-inference.huggingface.co/models/black-forest-labs/FLUX.1-dev"
# NOTE: "hf_gen" is a placeholder; use the same Hugging Face token as above
headers = {"Authorization": "Bearer hf_gen"}
# Function to send payload and generate image
def generate_image(prompt):
    try:
        # Send request to API
        response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
        # Check if the response is successful
        if response.status_code == 200:
            print("API call successful, generating image...")
            image_bytes = response.content
            # Try opening the image
            try:
                image = Image.open(io.BytesIO(image_bytes))
                return image
            except Exception as e:
                print(f"Error opening image: {e}")
        else:
            # Handle non-200 responses
            print(f"Failed to get image: Status code {response.status_code}")
            print("Response content:", response.text)  # Print response for debugging
    except Exception as e:
        print(f"An error occurred: {e}")
    return None  # No image could be generated
# Display image
def show_image(image):
    if image:
        plt.imshow(image)
        plt.axis('off')  # Hide axes
        plt.show()
    else:
        print("No image to display")
# Test the function with a prompt
prompt = "A flower with rain"
image = generate_image(prompt)
# Display the generated image
show_image(image)
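# Note (assumption): the hosted Inference API commonly answers with a 503 status while
# the FLUX.1-dev model is still loading. A minimal retry sketch around generate_image(),
# assuming a short wait between attempts, could look like this:
import time

def generate_image_with_retry(prompt, attempts=3, wait_seconds=20):
    for attempt in range(attempts):
        result = generate_image(prompt)
        if result is not None:
            return result
        print(f"Attempt {attempt + 1} failed, retrying in {wait_seconds}s...")
        time.sleep(wait_seconds)
    return None

# Example (hypothetical): image = generate_image_with_retry("A flower with rain")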
from transformers import AutoTokenizer, AutoModelForCausalLM
# Load GPT-Neo model for creative text generation
gpt_neo_tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-125M")
gpt_neo_model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-125M")
# Function to generate creative text based on translated text
def generate_creative_text(translated_text):
    input_ids = gpt_neo_tokenizer(translated_text, return_tensors='pt').input_ids
    # Pass pad_token_id explicitly (GPT-Neo has no pad token) to avoid a generation warning
    generated_text_ids = gpt_neo_model.generate(
        input_ids, max_length=100, pad_token_id=gpt_neo_tokenizer.eos_token_id
    )
    creative_text = gpt_neo_tokenizer.decode(generated_text_ids[0], skip_special_tokens=True)
    return creative_text
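# Quick check (illustrative): generate creative text from the earlier translation
creative_sample = generate_creative_text(translated_text)
print(f"Creative Text: {creative_sample}")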
import gradio as gr
# Function to handle the full workflow
def translate_generate_image_and_text(tamil_text):
    # Step 1: Translate Tamil text to English
    translated_text = translate_text(tamil_text)
    # Step 2: Generate an image based on the translated text
    image = generate_image(translated_text)
    # Step 3: Generate creative text based on the translated text
    creative_text = generate_creative_text(translated_text)
    return translated_text, creative_text, image
# Create Gradio interface
interface = gr.Interface(
    fn=translate_generate_image_and_text,
    inputs="text",
    outputs=["text", "text", "image"],
    title="Tamil to English Translation, Image Generation & Creative Text",
    description="Enter Tamil text to translate to English, generate an image, and create creative text based on the translation."
)
# Launch Gradio app
interface.launch()
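# Note (assumption): pyngrok is installed above but never used in this script. If the app
# were served on a local port instead of relying on Gradio's built-in share link, a tunnel
# could be opened roughly like this (sketch, not part of the original workflow):
# from pyngrok import ngrok
# public_url = ngrok.connect(7860)  # 7860 is Gradio's default port (assumed)
# print(f"Public URL: {public_url}")
# Alternatively, interface.launch(share=True) asks Gradio for a temporary public link.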