File size: 3,472 Bytes
01c75ae
eca9523
01c75ae
 
eca9523
01c75ae
 
 
 
 
 
 
 
 
 
918a703
 
 
01c75ae
 
 
918a703
c207609
918a703
 
 
 
 
 
 
 
 
 
 
 
 
 
eca9523
 
 
 
 
 
 
918a703
 
 
 
01c75ae
 
 
918a703
 
 
01c75ae
918a703
cde132e
 
 
918a703
 
 
cde132e
 
 
 
 
918a703
 
eca9523
918a703
 
01c75ae
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
import logging
import os
from typing import List, Tuple

import openai  # Assuming you're using OpenAI's API (make sure to install the OpenAI package)
from flask import Flask, request, jsonify

# Initialize Flask app
app = Flask(__name__)

# Set the OpenAI API key from the environment.
# NOTE(review): os.getenv returns None when OPENAI_API_KEY is unset; that only
# surfaces later as an authentication error on the first API call — confirm
# the variable is set in the deployment environment.
openai.api_key = os.getenv("OPENAI_API_KEY")

# System prompt prepended to every conversation sent to the model.
SYSTEM_MESSAGE = "You are a helpful assistant."

# Function to generate AI response
def generate_response(
    user_input: str,
    history: List[Tuple[str, str]],
    max_tokens: int = 150,
    temperature: float = 0.7,
    top_p: float = 1.0,
) -> str:
    """
    Generates a response from the AI model via a streamed chat completion.

    Args:
        user_input: The user's input message.
        history: A list of tuples containing the conversation history
                 (user input, AI response).
        max_tokens: The maximum number of tokens in the generated response.
        temperature: Controls the randomness of the generated response.
        top_p: Controls the nucleus sampling probability.
    Returns:
        str: The generated response, or a user-facing fallback/error message.
    """
    try:
        # Build the message list: system prompt, then alternating
        # user/assistant turns from the history, then the new user input.
        messages = [{"role": "system", "content": SYSTEM_MESSAGE}]
        for user_message, assistant_message in history:
            messages.append({"role": "user", "content": user_message})
            messages.append({"role": "assistant", "content": assistant_message})
        messages.append({"role": "user", "content": user_input})

        # Stream the completion and accumulate token deltas.
        # NOTE(review): openai.ChatCompletion was removed in openai>=1.0 —
        # this code assumes the legacy (<1.0) client; confirm the pinned
        # version or migrate to client.chat.completions.create.
        parts: List[str] = []
        for chunk in openai.ChatCompletion.create(
            model="gpt-3.5-turbo",  # You can use any model you prefer
            messages=messages,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
            stream=True,
        ):
            # Each streamed chunk should carry choices[0].delta.content;
            # guard every level so a malformed chunk cannot raise.
            if chunk and 'choices' in chunk and chunk['choices']:
                token = chunk['choices'][0].get('delta', {}).get('content', '')
                if token:
                    parts.append(token)
            else:
                # Malformed chunk or empty 'choices': stop consuming the
                # stream rather than risk an exception mid-response.
                logging.warning(
                    "Unexpected streaming chunk format or empty 'choices'; "
                    "aborting stream."
                )
                break
        # join is linear; repeated += in the loop can degrade to quadratic.
        response = "".join(parts)
        return response or "Sorry, I couldn't generate a response. Please try again."

    except Exception:
        # Broad catch is deliberate: this is the error boundary for the HTTP
        # handler. Log the full traceback (print() is invisible in most
        # deployments) and return a safe, generic message to the caller.
        logging.exception("Error while generating AI response")
        return "Error: An unexpected error occurred while processing your request."

# Route to handle user input and generate responses
@app.route("/chat", methods=["POST"])
def chat():
    try:
        # Get user input from the request
        user_input = request.json.get("user_input", "")
        history = request.json.get("history", [])
        
        # Generate the AI response
        response = generate_response(
            user_input=user_input, 
            history=history
        )
        
        # Return the response as JSON
        return jsonify({"response": response})

    except Exception as e:
        return jsonify({"error": str(e)}), 500

if __name__ == "__main__":
    # Run the Flask development server.
    # NOTE(review): debug=True enables the Werkzeug interactive debugger
    # (arbitrary code execution) and host="0.0.0.0" binds to all interfaces —
    # this combination must not reach production; confirm deployment uses a
    # proper WSGI server with debug disabled.
    app.run(debug=True, host="0.0.0.0", port=5000)