Geek7 committed
Commit 925e218 · verified · 1 Parent(s): 3ecd8a0

Update app.py

Files changed (1)
app.py (+25 -24)
app.py CHANGED
@@ -1,39 +1,40 @@
-from flask import Flask, request, send_file
-from flask_cors import CORS
+import gradio as gr
 from huggingface_hub import InferenceClient
 from PIL import Image
 import io
 import base64
 
-app = Flask(__name__)
-CORS(app)  # Enable CORS for all routes
+# Initialize the Inference Client
 client = InferenceClient()
 
-@app.route('/')
-def home():
-    return "Welcome to the Image Background Remover!"
-
-@app.route('/generate-image', methods=['POST'])
-def generate_image():
-    data = request.json  # Get the JSON data from the request
-    base64_image = data['image']  # Get the base64 image string
-    prompt = data['prompt']  # Get the prompt
-
+# Function to handle the image generation
+def generate_image(base64_image, prompt):
     # Decode the base64 image
-    image_data = base64.b64decode(base64_image)
+    image_data = base64.b64decode(base64_image.split(",")[1])
     image = Image.open(io.BytesIO(image_data))
 
     # Generate image using the InferenceClient
     generated_image = client.image_to_image(image, prompt=prompt)
 
-    # Save the generated image to a BytesIO object
+    # Save generated image to a BytesIO object
     img_byte_arr = io.BytesIO()
     generated_image.save(img_byte_arr, format='PNG')
-    img_byte_arr.seek(0)  # Move the cursor to the beginning of the BytesIO object
-
-    # Send the generated image back to the client
-    return send_file(img_byte_arr, mimetype='image/png')
-
-
-if __name__ == "__main__":
-    app.run(host='0.0.0.0', port=7860)  # Run directly if needed for testing
+    img_byte_arr.seek(0)  # Move the cursor to the beginning
+
+    # Return the generated image
+    return img_byte_arr
+
+# Create Gradio interface
+iface = gr.Interface(
+    fn=generate_image,
+    inputs=[
+        gr.inputs.Image(type="filepath", label="Input Image"),  # Filepath for uploaded image
+        gr.inputs.Textbox(label="Prompt")  # Textbox for the prompt
+    ],
+    outputs=gr.outputs.Image(type="numpy", label="Generated Image"),  # Output as numpy array
+    title="Image Generation with Hugging Face",
+    description="Upload an image and provide a prompt to generate a new image."
+)
+
+# Launch the Gradio interface
+iface.launch()
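
Note on the added code: the gr.inputs / gr.outputs namespaces were removed in Gradio 3.x, type="filepath" hands the handler a file path rather than a base64 data URL, and Gradio's Image output expects a PIL image, numpy array, or path rather than a BytesIO. The following is a minimal sketch (not part of the commit) of the same interface against current top-level Gradio components, assuming the handler opens the uploaded file directly and returns the PIL image produced by InferenceClient.image_to_image:

import gradio as gr
from huggingface_hub import InferenceClient
from PIL import Image

client = InferenceClient()

def generate_image(image_path, prompt):
    # type="filepath" passes the uploaded file's path, so open it directly
    image = Image.open(image_path)
    # image_to_image returns a PIL image, which Gradio can render as-is
    return client.image_to_image(image, prompt=prompt)

iface = gr.Interface(
    fn=generate_image,
    inputs=[
        gr.Image(type="filepath", label="Input Image"),
        gr.Textbox(label="Prompt"),
    ],
    outputs=gr.Image(type="pil", label="Generated Image"),
    title="Image Generation with Hugging Face",
    description="Upload an image and provide a prompt to generate a new image.",
)

iface.launch()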