Update app.py
app.py CHANGED
@@ -1,4 +1,21 @@
-import
+import os
+import torch
+from flask import Flask, jsonify, request
+from flask_cors import CORS
+from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, GenerationConfig
+import re
+
+# Set the HF_HOME environment variable to a writable directory
+os.environ["HF_HOME"] = "/workspace/huggingface_cache"
+
+app = Flask(__name__)
+
+# Enable CORS for specific origins
+CORS(app, resources={r"/send_message": {"origins": ["http://localhost:3000", "https://main.dbn2ikif9ou3g.amplifyapp.com"]}})
+
+# Global variables for model and tokenizer
+model = None
+tokenizer = None
 
 def get_model_and_tokenizer(model_id: str):
     global model, tokenizer
@@ -84,3 +101,5 @@ def handle_post_request():
         print(traceback.format_exc())  # Logs the full traceback
         return jsonify({"error": str(e)}), 500
 
+if __name__ == '__main__':
+    app.run(host='0.0.0.0', port=7860)
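
The hunk only shows the first two lines of get_model_and_tokenizer, so the new torch and BitsAndBytesConfig imports are not explained by the diff itself. Below is a minimal sketch, not the actual function body from this commit, of how the function could populate the model and tokenizer globals with a 4-bit quantized causal LM; the quantization settings, the early-return caching, and the return value are assumptions.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

model = None
tokenizer = None

def get_model_and_tokenizer(model_id: str):
    global model, tokenizer
    if model is not None and tokenizer is not None:
        # Reuse the already-loaded model across requests (assumed behaviour)
        return model, tokenizer

    # Assumed: 4-bit quantization to fit a large causal LM into the Space's memory
    bnb_config = BitsAndBytesConfig(
        load_in_4bit=True,
        bnb_4bit_compute_dtype=torch.float16,
    )
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        quantization_config=bnb_config,
        device_map="auto",
    )
    return model, tokenizer

The added __main__ block binds the Flask server to 0.0.0.0:7860, the default port a Hugging Face Space expects the app to listen on, so the Space can serve the /send_message endpoint directly.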