hasnanmr committed on
Commit
ca8e725
·
1 Parent(s): 05d89d9

add temperature

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -91,6 +91,7 @@ def normalize_query(user_query):
91
  try:
92
  response = client.chat.completions.create(
93
  model="llama-3.1-70b-versatile",
 
94
  messages=[{
95
  "role": "user",
96
  "content": f"""
@@ -111,7 +112,7 @@ def get_user_embedding(query):
111
  return model.encode(query)
112
  except Exception as e:
113
  print(f"Error generating embedding: {e}")
114
- return np.zeros(512) # Assuming 384 as default embedding size
115
 
116
  # Find similar places based on cosine similarity
117
  def get_similar_places(user_embedding):
@@ -153,6 +154,7 @@ def get_top_5_destinations(user_query):
153
  'similarity': similarity
154
  })
155
 
 
156
  return top_places
157
 
158
  # Generate response to user using Groq VM
@@ -175,6 +177,7 @@ def generate_response(user_query, top_places):
175
  # Generate the response using the model
176
  response = client.chat.completions.create(
177
  model="llama-3.1-70b-versatile",
 
178
  messages=[
179
  {"role": "system", "content": system_prompt}, # System prompt defines behavior
180
  {"role": "user", "content": f"Berikut adalah rekomendasi berdasarkan data: {destinations_data}"}
@@ -195,9 +198,6 @@ def chatbot(user_query):
195
  if isinstance(top_places, str): # Error case, e.g. "No places found"
196
  return top_places
197
 
198
- # Step 2: Generate the chatbot's response
199
- # response = generate_response(user_query, top_places)
200
-
201
  # only the first 5 element of top_places
202
  response = generate_response(user_query, top_places[:5])
203
 
 
91
  try:
92
  response = client.chat.completions.create(
93
  model="llama-3.1-70b-versatile",
94
+ temperature=0.5,
95
  messages=[{
96
  "role": "user",
97
  "content": f"""
 
112
  return model.encode(query)
113
  except Exception as e:
114
  print(f"Error generating embedding: {e}")
115
+ return np.zeros()
116
 
117
  # Find similar places based on cosine similarity
118
  def get_similar_places(user_embedding):
 
154
  'similarity': similarity
155
  })
156
 
157
+ print(normalized_query)
158
  return top_places
159
 
160
  # Generate response to user using Groq VM
 
177
  # Generate the response using the model
178
  response = client.chat.completions.create(
179
  model="llama-3.1-70b-versatile",
180
+ temperature=0.2,
181
  messages=[
182
  {"role": "system", "content": system_prompt}, # System prompt defines behavior
183
  {"role": "user", "content": f"Berikut adalah rekomendasi berdasarkan data: {destinations_data}"}
 
198
  if isinstance(top_places, str): # Error case, e.g. "No places found"
199
  return top_places
200
 
 
 
 
201
  # only the first 5 element of top_places
202
  response = generate_response(user_query, top_places[:5])
203