invincible-jha committed · verified
Commit 48c0d8d · 1 Parent(s): 7d3b780

Update app.py

Files changed (1): app.py (+37 -15)
app.py CHANGED
@@ -9,33 +9,48 @@ from sklearn.naive_bayes import MultinomialNB
 import asyncio
 from crewai import Agent, Task, Crew
 from huggingface_hub import InferenceClient
+from langchain.tools import Tool
+from langchain.agents import Tool as LangChainTool
 import random
 import json
+import warnings
+from langchain.deprecation import LangChainDeprecationWarning
+
+# Suppress LangChain deprecation warnings
+warnings.filterwarnings("ignore", category=LangChainDeprecationWarning)
 
 # Set up logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 logger = logging.getLogger(__name__)
 
-# Function to get Hugging Face API token
 def get_huggingface_api_token():
     token = os.getenv('HUGGINGFACEHUB_API_TOKEN')
-    if not token:
-        try:
-            with open('config.json', 'r') as config_file:
-                config = json.load(config_file)
-                token = config.get('HUGGINGFACEHUB_API_TOKEN')
-        except (FileNotFoundError, json.JSONDecodeError):
-            logger.warning("Config file not found or invalid. Please provide the Hugging Face API token.")
-
-    if not token:
-        token = input("Please enter your Hugging Face API token: ")
-
-    return token
+    if token:
+        logger.info("Hugging Face API token found in environment variables.")
+        return token
+
+    try:
+        with open('config.json', 'r') as config_file:
+            config = json.load(config_file)
+            token = config.get('HUGGINGFACEHUB_API_TOKEN')
+            if token:
+                logger.info("Hugging Face API token found in config.json file.")
+                return token
+    except FileNotFoundError:
+        logger.warning("Config file not found.")
+    except json.JSONDecodeError:
+        logger.error("Error reading the config file. Please check its format.")
+
+    logger.error("Hugging Face API token not found. Please set it up.")
+    return None
 
 # Initialize the Hugging Face Inference Client
 try:
     hf_token = get_huggingface_api_token()
+    if not hf_token:
+        raise ValueError("Hugging Face API token is not set. Please set it up before running the application.")
     client = InferenceClient(model="mistralai/Mistral-7B-Instruct-v0.2", token=hf_token)
+    logger.info("Hugging Face Inference Client initialized successfully.")
 except Exception as e:
     logger.error(f"Failed to initialize Hugging Face client: {e}")
     sys.exit(1)
@@ -106,6 +121,13 @@ def post_process_response(response):
 
     return response
 
+# Define the tool for CrewAI
+generate_response_tool = Tool(
+    name="GenerateResponse",
+    func=generate_response,
+    description="Generate a response using the Mistral model"
+)
+
 # CrewAI setup
 communication_expert_crew = Agent(
     role='Communication Expert',
@@ -113,7 +135,7 @@ communication_expert_crew = Agent(
     backstory="""You are an expert in communication, specializing in understanding and rephrasing queries to ensure they are interpreted in the most positive and constructive light. Your role is crucial in setting the tone for respectful and empathetic interactions.""",
     verbose=True,
     allow_delegation=False,
-    tools=[generate_response]
+    tools=[generate_response_tool]
 )
 
 response_expert_crew = Agent(
@@ -122,7 +144,7 @@ response_expert_crew = Agent(
     backstory="""You are an expert in Zerodha's services and policies, with a keen ability to provide comprehensive and empathetic responses. Your role is to ensure that all user queries are addressed accurately while maintaining a respectful and supportive tone.""",
     verbose=True,
     allow_delegation=False,
-    tools=[generate_response]
+    tools=[generate_response_tool]
 )
 
 # Main function
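
Note on the token change in the first hunk: get_huggingface_api_token() no longer prompts on stdin. It now reads HUGGINGFACEHUB_API_TOKEN from the environment first, falls back to a config.json next to app.py, and returns None if neither is set, which makes the client initialization fail fast with a clear error. A minimal sketch of the two supported setups follows; the helper write_config and the token value are placeholders for illustration, not part of this commit.

import json
import os

# Option 1: export the token in the environment before launching the app
# (placeholder value):
#   export HUGGINGFACEHUB_API_TOKEN=hf_xxxxxxxx

# Option 2: create the fallback config.json that get_huggingface_api_token()
# reads. write_config is a hypothetical helper, not code from app.py.
def write_config(token: str, path: str = "config.json") -> None:
    with open(path, "w") as config_file:
        json.dump({"HUGGINGFACEHUB_API_TOKEN": token}, config_file)

if __name__ == "__main__":
    write_config("hf_xxxxxxxx")  # placeholder token; replace with a real one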
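
Note on the agent changes in the later hunks: the bare generate_response callable is replaced by a langchain Tool wrapper (generate_response_tool), and both agents now receive that wrapped tool. The main function itself is outside this diff; the sketch below only illustrates how the two tool-equipped agents could be composed into a Crew, assuming a crewai version whose Task accepts description, expected_output, and agent. The task descriptions and every name other than the two agents are assumptions, not code from app.py.

from crewai import Task, Crew

# Hypothetical wiring: communication_expert_crew and response_expert_crew are
# the agents defined in app.py; everything else is illustrative only.
def run_crew(user_query: str) -> str:
    rephrase_task = Task(
        description=f"Rephrase the following query constructively: {user_query}",
        expected_output="A respectful, positively framed version of the query.",
        agent=communication_expert_crew,
    )
    answer_task = Task(
        description="Answer the rephrased query about Zerodha's services.",
        expected_output="A complete, empathetic answer for the user.",
        agent=response_expert_crew,
    )
    crew = Crew(
        agents=[communication_expert_crew, response_expert_crew],
        tasks=[rephrase_task, answer_task],
    )
    # kickoff() runs the tasks in order and returns the final result
    return str(crew.kickoff())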