pravin007s commited on
Commit
47454a7
·
verified ·
1 Parent(s): 4581523

Upload gen_ai_project_f.py

Browse files
Files changed (1) hide show
  1. gen_ai_project_f.py +137 -0
gen_ai_project_f.py ADDED
@@ -0,0 +1,137 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """gen ai project f.ipynb
3
+
4
+ Automatically generated by Colab.
5
+
6
+ Original file is located at
7
+ https://colab.research.google.com/drive/1iF7hdOjWNeFUtGvUYdaFsBErJGnY1h5J
8
+ """
9
+
10
+ # Install necessary packages
11
+ !pip install transformers torch diffusers streamlit gradio huggingface_hub
12
+ !pip install pyngrok # For exposing the app to the public
13
+ !pip install sacremoses
14
+ !pip install sentencepiece
15
+
16
+ from huggingface_hub import login
17
+
18
+ login(token="hf_gen")
19
+
20
+ !pip install requests
21
+ !pip install Pillow
22
+
23
# Import necessary libraries
from transformers import MarianMTModel, MarianTokenizer, pipeline

# Load the translation model and tokenizer.
# "opus-mt-mul-en" is a multilingual -> English MarianMT checkpoint; Tamil is
# among its supported source languages.
model_name = "Helsinki-NLP/opus-mt-mul-en"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

# Create a translation pipeline (wraps tokenization, generation and decoding);
# used by translate_text() below.
translator = pipeline("translation", model=model, tokenizer=tokenizer)
33
+
34
def translate_text(tamil_text):
    """Translate *tamil_text* to English using the module-level `translator`.

    Output length is capped at 40 tokens. Failures are reported in-band:
    instead of raising, the function returns a string describing the error.
    """
    try:
        result = translator(tamil_text, max_length=40)
    except Exception as e:
        # Report the failure as the return value, per this script's convention.
        return f"An error occurred: {str(e)}"
    return result[0]['translation_text']
43
+
44
# Test translation with example Tamil text.
# Smoke test: exercises the translator pipeline once at import time.
tamil_text = "மழையுடன் ஒரு பூ" # "A flower with rain"
translated_text = translate_text(tamil_text)
print(f"Translated Text: {translated_text}")
48
+
49
import requests
import io
import os
from PIL import Image
import matplotlib.pyplot as plt

# API endpoint for the hosted FLUX.1-dev text-to-image model.
API_URL = "https://api-inference.huggingface.co/models/black-forest-labs/FLUX.1-dev"
# SECURITY: do not hard-code API tokens in source. Prefer the HF_TOKEN
# environment variable; the original literal placeholder remains as a
# fallback so behavior is unchanged when the variable is unset.
headers = {"Authorization": f"Bearer {os.environ.get('HF_TOKEN', 'hf_gen')}"}
57
+
58
# Function to send payload and generate image
def generate_image(prompt):
    """Generate an image for *prompt* via the Hugging Face inference API.

    Posts the prompt to the FLUX.1-dev endpoint (module-level API_URL /
    headers) and decodes the response body as a PIL image.

    Returns:
        PIL.Image.Image on success, or None on any failure (HTTP error,
        undecodable payload, or network exception). Failures are printed
        rather than raised so the Gradio UI keeps working.
    """
    try:
        # Send request to API. A timeout bounds the call so a stalled
        # endpoint cannot hang the app indefinitely (the original had none).
        response = requests.post(
            API_URL, headers=headers, json={"inputs": prompt}, timeout=120
        )

        # Check if the response is successful
        if response.status_code == 200:
            print("API call successful, generating image...")
            image_bytes = response.content

            # Try opening the image
            try:
                image = Image.open(io.BytesIO(image_bytes))
                return image
            except Exception as e:
                print(f"Error opening image: {e}")
        else:
            # Handle non-200 responses (e.g. model loading, auth, rate limit)
            print(f"Failed to get image: Status code {response.status_code}")
            print("Response content:", response.text)  # Print response for debugging

    except Exception as e:
        print(f"An error occurred: {e}")
    # Explicit fall-through: all failure paths yield None (same as the
    # original's implicit return, made visible for callers).
    return None
82
+
83
# Display image
def show_image(image):
    """Render *image* with matplotlib, or print a notice when it is missing."""
    if not image:
        print("No image to display")
        return
    plt.imshow(image)
    plt.axis('off')  # Hide axes
    plt.show()
91
+
92
# Test the function with a prompt.
# Smoke test: one end-to-end API call at import time; show_image() handles
# the None returned on failure.
prompt = "A flower with rain"
image = generate_image(prompt)

# Display the generated image
show_image(image)
98
+
99
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load GPT-Neo model for creative text generation
# (125M-parameter variant — small enough for CPU inference in Colab).
gpt_neo_tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-125M")
gpt_neo_model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-125M")
104
+
105
# Function to generate creative text based on translated text
def generate_creative_text(translated_text):
    """Continue *translated_text* with GPT-Neo, up to 100 tokens total.

    The decoded string includes the prompt itself (the whole generated
    sequence is decoded). Default greedy decoding, so output is deterministic.
    """
    encoded = gpt_neo_tokenizer(translated_text, return_tensors='pt')
    # Pass attention_mask and set pad_token_id explicitly: GPT-Neo defines no
    # pad token, and omitting these makes generate() emit warnings and guess
    # the mask from pad positions.
    generated_text_ids = gpt_neo_model.generate(
        encoded.input_ids,
        attention_mask=encoded.attention_mask,
        max_length=100,
        pad_token_id=gpt_neo_tokenizer.eos_token_id,
    )
    return gpt_neo_tokenizer.decode(generated_text_ids[0], skip_special_tokens=True)
111
+
112
+ import gradio as gr
113
+
114
# Function to handle the full workflow
def translate_generate_image_and_text(tamil_text):
    """End-to-end pipeline: Tamil text -> English -> (image, creative text).

    Returns a (translated_text, creative_text, image) tuple in the order the
    Gradio interface's outputs expect.
    """
    # 1) Tamil -> English
    english = translate_text(tamil_text)

    # 2) English prompt -> generated image (may be None on API failure)
    picture = generate_image(english)

    # 3) English prompt -> GPT-Neo continuation
    story = generate_creative_text(english)

    return english, story, picture
126
+
127
# Create Gradio interface.
# One text input -> three outputs (translated text, creative text, image),
# matching the tuple returned by translate_generate_image_and_text.
interface = gr.Interface(
    fn=translate_generate_image_and_text,
    inputs="text",
    outputs=["text", "text", "image"],
    title="Tamil to English Translation, Image Generation & Creative Text",
    description="Enter Tamil text to translate to English, generate an image, and create creative text based on the translation."
)

# Launch Gradio app.
# NOTE(review): launch() serves the UI; in Colab a share/public URL is
# typically needed (share=True) — confirm for the target environment.
interface.launch()