Shreyas094 committed
Commit 4c78583 · verified · 1 Parent(s): 480bd35

Update app.py

Files changed (1)
app.py +14 -4
app.py CHANGED

@@ -1,6 +1,7 @@
 import os
 import logging
 import asyncio
+import random  # Import random for token selection
 from typing import AsyncGenerator, Tuple
 import gradio as gr
 from huggingface_hub import InferenceClient
@@ -12,8 +13,16 @@ from duckduckgo_search import DDGS
 # Configure logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 
-# Environment variables and configurations
-huggingface_token = os.environ.get("HUGGINGFACE_TOKEN")
+# List of Hugging Face tokens
+huggingface_tokens = [
+    os.environ.get("HUGGINGFACE_TOKEN_1"),
+    os.environ.get("HUGGINGFACE_TOKEN_2"),
+    os.environ.get("HUGGINGFACE_TOKEN_3")
+]
+
+# Function to get a random Hugging Face token
+def get_random_token():
+    return random.choice(huggingface_tokens)
 
 MODELS = [
     "mistralai/Mistral-7B-Instruct-v0.3",
@@ -25,7 +34,6 @@ MODELS = [
     "google/gemma-2-27b-it"
 ]
 
-# Default system message template
 DEFAULT_SYSTEM_PROMPT = """You are a world-class financial AI assistant, capable of complex reasoning and reflection.
 Reason through the query inside <thinking> tags, and then provide your final response inside <output> tags.
 Providing comprehensive and accurate information based on web search results is essential.
@@ -92,7 +100,9 @@ Write a detailed and complete research document that fulfills the following user
     if history:
         messages = history + messages
 
-    client = InferenceClient(model, token=huggingface_token)
+    # Get a random token for the API call
+    token = get_random_token()
+    client = InferenceClient(model, token=token)
     full_response = ""
 
     for call in range(num_calls):
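
For reference, a minimal sketch of the token-rotation pattern this commit introduces, assuming the three HUGGINGFACE_TOKEN_* secrets are configured in the Space. The filtering of unset variables is an extra safeguard not present in the committed code, added here only so random.choice can never return None:

# Sketch (not part of the commit): same random-token pattern, but
# skipping any HUGGINGFACE_TOKEN_* variable that is unset or empty.
# Names mirror the committed code.
import os
import random

huggingface_tokens = [
    t for t in (
        os.environ.get("HUGGINGFACE_TOKEN_1"),
        os.environ.get("HUGGINGFACE_TOKEN_2"),
        os.environ.get("HUGGINGFACE_TOKEN_3"),
    )
    if t  # drop missing/empty tokens
]

def get_random_token():
    # Uniformly pick one of the configured tokens per API call
    return random.choice(huggingface_tokens)

Each generation call then constructs its InferenceClient with a freshly drawn token, spreading requests across the configured accounts rather than exhausting a single token's rate limit.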