from flask import Flask, jsonify, request
from flask_cors import CORS
from transformers import AutoTokenizer, AutoModelForCausalLM, GenerationConfig
from huggingface_hub import login
import os

app = Flask(__name__)

# Enable CORS for specific origins.
# NOTE(review): the original pattern was r"api/predict/*" (no leading slash),
# which matches no route in this file ("/" and "/send_message"). Anchored with
# a leading slash here — confirm the intended resource paths against the
# frontend callers.
CORS(app, resources={r"/api/predict/*": {"origins": ["http://localhost:3000", "https://main.dbn2ikif9ou3g.amplifyapp.com"]}})

model_id = "YALCINKAYA/opsgenius-large"


def get_model_and_tokenizer(model_id):
    """Load and return ``(model, tokenizer)`` for *model_id*.

    Fix: the original code called this function without ever defining it,
    raising NameError at import time. Logs in to the Hugging Face Hub first
    when an HF_TOKEN environment variable is present (needed for gated or
    private models).
    """
    hf_token = os.getenv("HF_TOKEN")
    if hf_token:
        login(token=hf_token)
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(model_id)
    return model, tokenizer


# Loaded once at import so every request reuses the same weights.
model, tokenizer = get_model_and_tokenizer(model_id)


def formatted_prompt(question) -> str:
    """Wrap *question* in the ChatML-style markers the model expects."""
    return f"<|im_start|>user\n{question}<|im_end|>\n<|im_start|>assistant:"


def generate_response(user_input):
    """Generate the assistant's reply for *user_input*.

    Fixes two defects in the original:
    - the function body ended with a bare ``response`` expression (a no-op),
      so it always returned ``None``;
    - the loaded model was never invoked — the "response" was just the prompt
      echoed back.
    """
    prompt = formatted_prompt(user_input)
    inputs = tokenizer(prompt, return_tensors="pt")
    generation_config = GenerationConfig(
        max_new_tokens=256,
        pad_token_id=tokenizer.eos_token_id,
    )
    output_ids = model.generate(**inputs, generation_config=generation_config)
    # Decode only the newly generated tokens, not the echoed prompt.
    new_tokens = output_ids[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)


@app.route("/", methods=["GET"])
def handle_get_request():
    """Echo the optional ``message`` query parameter back as JSON."""
    message = request.args.get("message", "No message provided.")
    return jsonify({"message": message, "status": "GET request successful!"})


@app.route("/send_message", methods=["POST"])
def handle_post_request():
    """Run the model on the JSON ``inputs`` field and return its reply.

    Returns 400 when the request carries no JSON body.
    """
    data = request.get_json()
    if data is None:
        return jsonify({"error": "No JSON data provided"}), 400

    message = data.get("inputs", "No message provided.")

    # Fix: the original passed the undefined name ``user_input`` here,
    # raising NameError on every request; the extracted ``message`` is what
    # was meant. (The unused ``new_token = os.getenv("HF_TOKEN")`` read was
    # dropped — token handling lives in get_model_and_tokenizer.)
    model_response = generate_response(message)

    return jsonify({
        "received_message": model_response,
        "status": "POST request successful!"
    })


# Hugging Face Spaces runs the app itself; this guard only matters for
# local development runs.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=7860)