Geek7 committed
Commit c615408 · verified · 1 Parent(s): d7f2d01

Update app.py

Files changed (1)
app.py +48 -15
app.py CHANGED
@@ -1,14 +1,20 @@
+from flask import Flask, request, jsonify, send_file
+from flask_cors import CORS
 import os
 from huggingface_hub import InferenceClient
+from io import BytesIO
 from PIL import Image
-import gradio as gr  # Import Gradio for the UI
+
+# Initialize the Flask app
+app = Flask(__name__)
+CORS(app)  # Enable CORS for all routes
 
 # Initialize the InferenceClient with your Hugging Face token
 HF_TOKEN = os.environ.get("HF_TOKEN")  # Ensure to set your Hugging Face token in the environment
-client = InferenceClient(token=HF_TOKEN)  # Initialize the client
+client = InferenceClient(token=HF_TOKEN)
 
 # Function to generate an image from a text prompt
-def generate_image(prompt, negative_prompt=None, model=None):
+def generate_image(prompt, negative_prompt=None, model="stabilityai/stable-diffusion-2-1"):
     try:
         # Generate the image using Hugging Face's inference API
         image = client.text_to_image(prompt=prompt, negative_prompt=negative_prompt, model=model)
@@ -17,15 +23,42 @@ def generate_image(prompt, negative_prompt=None, model=None):
         print(f"Error generating image: {str(e)}")
         return None
 
-# Set up the Gradio interface
-gr.Interface(
-    fn=generate_image,
-    inputs=[
-        gr.Textbox(label="Prompt", placeholder="Enter a text prompt", lines=2),
-        gr.Textbox(label="Negative Prompt (Optional)", placeholder="Enter negative prompt", lines=2),
-        gr.Textbox(label="Model Name", placeholder="Enter model name", value="stabilityai/stable-diffusion-2-1"),
-    ],
-    outputs="image",
-    title="Image Generation with Hugging Face",
-    description="Enter a prompt, optional negative prompt, and model name to generate an image.",
-).launch()  # Launch the Gradio interface
+# Flask route for the API endpoint to generate an image
+@app.route('/generate_image', methods=['POST'])
+def generate_api():
+    data = request.get_json()
+
+    # Extract required fields from the request
+    prompt = data.get('prompt', '')
+    negative_prompt = data.get('negative_prompt', None)
+    model_name = data.get('model', 'stabilityai/stable-diffusion-2-1')  # Default model
+
+    if not prompt:
+        return jsonify({"error": "Prompt is required"}), 400
+
+    try:
+        # Call the generate_image function with the provided parameters
+        image = generate_image(prompt, negative_prompt, model_name)
+
+        if image:
+            # Save the image to a BytesIO object
+            img_byte_arr = BytesIO()
+            image.save(img_byte_arr, format='PNG')  # Encode the image as PNG
+            img_byte_arr.seek(0)  # Move to the start of the byte stream
+
+            # Send the generated image as the response
+            return send_file(
+                img_byte_arr,
+                mimetype='image/png',
+                as_attachment=False,  # Serve the image inline rather than as a download
+                download_name='generated_image.png'  # File name used if the client saves it
+            )
+        else:
+            return jsonify({"error": "Failed to generate image"}), 500
+    except Exception as e:
+        print(f"Error in generate_api: {str(e)}")  # Log the error
+        return jsonify({"error": str(e)}), 500
+
+# Add this block so the app runs when the script is executed directly
+if __name__ == "__main__":
+    app.run(host='0.0.0.0', port=7860)  # Run directly if needed for testing
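
For reference, a minimal client-side sketch of how the new /generate_image endpoint could be exercised once the app is running. It assumes the Flask server is reachable at http://localhost:7860 (matching the app.run call above); the requests dependency, the example prompt, and the output filename are illustrative and not part of the commit.

import requests

# Hypothetical base URL; adjust to wherever the Flask app is actually served
url = "http://localhost:7860/generate_image"
payload = {
    "prompt": "a watercolor painting of a lighthouse at dawn",
    "negative_prompt": "blurry, low quality",     # optional field
    "model": "stabilityai/stable-diffusion-2-1",  # optional, matches the server-side default
}

response = requests.post(url, json=payload)
if response.ok:
    # The endpoint returns raw PNG bytes, so write them straight to disk
    with open("generated_image.png", "wb") as f:
        f.write(response.content)
else:
    # Error responses are JSON objects with an "error" key
    print(response.status_code, response.json().get("error"))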