Update myapp.py
myapp.py CHANGED

@@ -4,7 +4,7 @@ from transformers import AutoModel, AutoTokenizer
 from fastsafetensors import safe_load
 
 # Initialize the Flask app
-
+myapp = Flask(__name__)
 
 # Load the model and tokenizer using safe_load
 model_path = "https://huggingface.co/prompthero/openjourney-v4/blob/main/safety_checker/model.safetensors"  # Replace with your .safetensors file path
@@ -19,11 +19,11 @@ tokenizer = AutoTokenizer.from_pretrained(model_name)
 # Load the model weights from safeload
 model = AutoModel.from_pretrained(model_name, state_dict=model_data)
 
-@
+@myapp.route('/')
 def index():
     return "Welcome to the AI Model API!"
 
-@
+@myapp.route('/generate', methods=['POST'])
 def generate_output():
     data = request.json
     prompt = data.get('prompt', 'Hello, world!')
@@ -39,4 +39,4 @@ def generate_output():
     return jsonify(outputs)
 
 if __name__ == "__main__":
-
+    myapp.run(host='0.0.0.0', port=5000)
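A note on model_path: the inline comment asks for a local .safetensors file path, but the value shown in the diff is an HTTP URL to the file on the Hugging Face Hub. If a local copy is needed, one way to obtain it is huggingface_hub's hf_hub_download; the following is only a sketch under that assumption and is not part of the commit:

# Sketch (not part of the commit): fetch the file referenced by the URL above
# so that model_path can point at a local .safetensors file instead.
# Assumes the huggingface_hub package is installed.
from huggingface_hub import hf_hub_download

model_path = hf_hub_download(
    repo_id="prompthero/openjourney-v4",
    filename="safety_checker/model.safetensors",
)
print(model_path)  # local cached path, suitable to pass to a safetensors loader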
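Once the updated app is running via myapp.run(host='0.0.0.0', port=5000), the /generate route added in this commit can be exercised with a small client. A minimal sketch, assuming the server is reachable on localhost:5000 and the requests package is installed (the shape of the JSON response depends on what generate_output() puts into outputs):

# Sketch (not part of the commit): POST a prompt to the /generate route.
import requests

resp = requests.post(
    "http://localhost:5000/generate",
    json={"prompt": "Hello, world!"},  # read via data.get('prompt', ...) on the server
    timeout=60,
)
print(resp.status_code)
print(resp.json())  # the jsonify(outputs) payload returned by the app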