alibicer committed
Commit 2c5504c · verified · 1 Parent(s): b4d2eac

Update app.py

Files changed (1)
  1. app.py  +10 -3
app.py CHANGED
@@ -19,7 +19,7 @@ if os.path.exists(".env"):
 OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
 client = OpenAI(api_key=OPENAI_API_KEY)
 
-def gpt_call(history, user_message, model="gpt-4o-mini", max_tokens=512, temperature=0.7, top_p=0.95):
+def gpt_call(history, user_message, model="gpt-4o-mini", max_tokens=1024, temperature=0.7, top_p=0.95):
     """
     Calls OpenAI Chat API to generate responses.
     - history: [(user_text, assistant_text), ...]
@@ -44,7 +44,13 @@ def gpt_call(history, user_message, model="gpt-4o-mini", max_tokens=512, tempera
         top_p=top_p
     )
 
-    return completion.choices[0].message.content
+    response = completion.choices[0].message.content
+
+    # Ensure long responses are handled
+    if len(response) > 1000:
+        response = response[:1000] + "...\n[Message too long. Type 'continue' to see more.]"
+
+    return response
 
 def respond(user_message, history):
     """
@@ -92,7 +98,8 @@ with gr.Blocks() as demo:
     # Initialize chatbot with first message
     chatbot = gr.Chatbot(
         value=[("", INITIAL_PROMPT)],  # Initial system message
-        height=500
+        height=600,  # Increased height for better visibility
+        scroll=True  # Allow scrolling for long responses
     )
 
     # Maintain chat history state
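For reference, the truncation guard this commit adds to gpt_call can be exercised on its own, without an API key. The sketch below is illustrative only: the helper name truncate_response and the sample input are hypothetical, while the 1000-character threshold and the continuation notice mirror the committed code.

# Minimal sketch of the truncation guard introduced in this commit.
# `truncate_response` is a hypothetical helper; the limit and notice
# string follow the logic added to gpt_call.

def truncate_response(response: str, limit: int = 1000) -> str:
    """Clip overly long model output and append a continuation notice."""
    if len(response) > limit:
        return response[:limit] + "...\n[Message too long. Type 'continue' to see more.]"
    return response


if __name__ == "__main__":
    long_reply = "word " * 400          # ~2000 characters, well past the limit
    clipped = truncate_response(long_reply)
    print(len(clipped))                 # 1000 characters plus the appended notice
    print(clipped[-60:])                # ends with the "[Message too long. ...]" suffix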