Kevin Hu committed
Commit 1a43942 · Parent: eabf8a3

make gemini robust (#3012)


### What problem does this PR solve?

#3003

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

Files changed (1):
  1. rag/llm/chat_model.py (+2, −1)
rag/llm/chat_model.py

```diff
@@ -780,10 +780,11 @@ class GeminiChat(Base):
                 ans += resp.text
                 yield ans
 
+            yield response._chunks[-1].usage_metadata.total_token_count
         except Exception as e:
             yield ans + "\n**ERROR**: " + str(e)
 
-        yield response._chunks[-1].usage_metadata.total_token_count
+            yield 0
 
 
 class GroqChat:
```
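For context, the pattern after this change can be sketched as a standalone generator. This is a minimal sketch based only on the lines visible in the diff: the function name `stream_answer`, the `_FakeStream`/`_FakeChunk` stand-ins, and the demo values are illustrative inventions, and only the attributes the diff actually touches (`.text`, `._chunks`, `.usage_metadata.total_token_count`) are modelled here; the real code lives in `GeminiChat` and streams from the google-generativeai SDK.

```python
from dataclasses import dataclass, field
from typing import Iterator, List, Union


@dataclass
class _FakeUsage:
    total_token_count: int


@dataclass
class _FakeChunk:
    text: str
    usage_metadata: _FakeUsage


@dataclass
class _FakeStream:
    # Stand-in for the SDK's streaming response object; only the attributes
    # referenced in the diff above are modelled (hypothetical helper).
    _chunks: List[_FakeChunk] = field(default_factory=list)

    def __iter__(self):
        return iter(self._chunks)


def stream_answer(response) -> Iterator[Union[str, int]]:
    """Mirror of the streaming pattern after this fix: yield the growing
    answer text, then the total token count on success; on any failure,
    yield the partial answer plus the error text, then 0."""
    ans = ""
    try:
        for resp in response:
            ans += resp.text
            yield ans

        # Moved inside the try block by this PR, so a failed or empty
        # stream can no longer raise outside the except handler.
        yield response._chunks[-1].usage_metadata.total_token_count
    except Exception as e:
        yield ans + "\n**ERROR**: " + str(e)

        yield 0


if __name__ == "__main__":
    ok = _FakeStream([_FakeChunk("Hello ", _FakeUsage(3)),
                      _FakeChunk("world", _FakeUsage(7))])
    print(list(stream_answer(ok)))            # ['Hello ', 'Hello world', 7]
    print(list(stream_answer(_FakeStream()))) # ['\n**ERROR**: list index out of range', 0]
```

On success the generator yields the accumulated answer followed by the reported token count; on any exception it yields the partial answer with the error message and then 0, so a consumer that treats a trailing integer as the token usage no longer hits the unhandled `_chunks` access that the removed line could trigger.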