Shreyas094 committed
Commit d7a8a93
Parent(s): eef32e4

Update app.py

Files changed (1):
  app.py +1 -1
app.py CHANGED
@@ -529,7 +529,7 @@ def get_response_from_llama(query, model, selected_docs, file_type, num_calls=1,
     for _ in range(num_calls):
         try:
             # Generate content with streaming enabled
-            for response in client.text_generation(
+            for response in client.chat_completion(
                 prompt=prompt,
                 max_new_tokens=1000,  # Reduced to ensure we stay within token limits
                 temperature=temperature,
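
Note that the commit swaps client.text_generation(...) for client.chat_completion(...) while keeping the prompt/max_new_tokens keyword arguments from the old call. If client is a huggingface_hub.InferenceClient (an assumption based on the surrounding code), chat_completion follows the OpenAI-style chat API instead: it takes a messages list plus max_tokens, and streamed chunks expose text under choices[0].delta.content. Below is a minimal sketch of what the streaming loop would look like under that assumption; the names prompt, temperature, and num_calls come from the diff, while the model ID and response handling are illustrative.

    from huggingface_hub import InferenceClient

    # Assumed client setup; the actual model ID used in app.py may differ.
    client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct")

    def stream_llama_response(prompt, temperature=0.7, num_calls=1):
        """Sketch of the chat_completion-based loop suggested by the diff."""
        full_response = ""
        for _ in range(num_calls):
            try:
                # chat_completion expects OpenAI-style messages rather than a raw
                # prompt, and uses max_tokens rather than max_new_tokens.
                for chunk in client.chat_completion(
                    messages=[{"role": "user", "content": prompt}],
                    max_tokens=1000,         # kept small to stay within token limits
                    temperature=temperature,
                    stream=True,             # yield partial chunks as they arrive
                ):
                    delta = chunk.choices[0].delta.content
                    if delta:
                        full_response += delta
                        yield full_response
            except Exception as e:
                yield f"Error during generation: {e}"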