lamdao committed on
Commit b42bafc · verified · 1 Parent(s): 1bdcf54

Update app.py

Files changed (1)
  1. app.py +33 -14
app.py CHANGED
@@ -1,16 +1,29 @@
 import gradio as gr
 import requests
+import json
 from huggingface_hub import InferenceClient
 
 API_TOKEN = "your_huggingface_api_token" # Replace with your actual token
 API_URL = "https://api-inference.huggingface.co/models/InterSync/Mistral-7B-Instruct-v0.2-Function-Calling"
 headers = {"Authorization": f"Bearer {API_TOKEN}"}
 
-def get_weather(location, unit="celsius"):
-    """Gets the current weather in a given location."""
-    # Your code to fetch weather data using an external API (e.g., OpenWeatherMap)
-    ...
-    return weather_data
+def get_weather(location: str, unit: str = "celsius"):
+    # Replace with your actual weather API call
+    pass
+
+def get_weather_schema():
+    return {
+        "name": "get_weather",
+        "description": "Get the current weather in a given location",
+        "parameters": {
+            "type": "object",
+            "properties": {
+                "location": {"type": "string", "description": "The city and state, or zip code"},
+                "unit": {"type": "string", "enum": ["celsius", "fahrenheit"], "description": "Unit of temperature"}
+            },
+            "required": ["location"]
+        }
+    }
 
 def query_model(payload):
     response = requests.post(API_URL, headers=headers, json=payload)
@@ -31,19 +44,25 @@ with gr.Blocks() as demo:
 
     def bot(history):
         user_message = history[-1][0]
-        function_call = {
-            "name": "get_weather",
-            "arguments": {"location": user_message}
-        }
         payload = {
             "inputs": user_message,
-            "parameters": {
-                "function_call": function_call
-            }
+            "parameters": {"function_call": "auto"}
         }
         output = query_model(payload)
-        bot_response = output[0]['generated_text'] # Assuming API returns text in this format
-        history[-1][1] = bot_response # Update the last message in history with bot response
+
+        # Parse the model's response
+        if 'function_call' in output and 'name' in output['function_call']:
+            function_name = output['function_call']['name']
+            arguments = output['function_call'].get('arguments', {})
+            if function_name == "get_weather" and arguments:
+                weather_info = get_weather(**arguments)
+                response_message = f"The weather in {arguments['location']} is {weather_info['description']} with a temperature of {weather_info['temperature']} {weather_info['unit']}."
+            else:
+                response_message = "Function not found or invalid arguments."
+        else:
+            response_message = output[0]['generated_text']
+
+        history[-1][1] = response_message
         return history
 
     input_text.change(user, [input_text, output_text], [input_text, output_text], queue=False).then(
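
Note: the commit leaves get_weather as a pass stub, while the new parsing branch in bot() indexes weather_info['description'], weather_info['temperature'], and weather_info['unit'], so a real implementation has to return a dict with those keys. A minimal sketch against OpenWeatherMap's current-weather endpoint (the OWM_API_KEY placeholder and the exact response fields used below are assumptions, not part of the commit):

import requests

OWM_API_KEY = "your_openweathermap_api_key"  # assumption: supply your own key

def get_weather(location: str, unit: str = "celsius"):
    # Sketch only: query OpenWeatherMap's current-weather endpoint and map the
    # response onto the dict shape that bot() expects.
    units = "metric" if unit == "celsius" else "imperial"
    resp = requests.get(
        "https://api.openweathermap.org/data/2.5/weather",
        params={"q": location, "units": units, "appid": OWM_API_KEY},
        timeout=10,
    )
    resp.raise_for_status()
    data = resp.json()
    return {
        "description": data["weather"][0]["description"],
        "temperature": data["main"]["temp"],
        "unit": unit,
    }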
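
Also worth noting: the new branch if 'function_call' in output assumes query_model() returns a dict with a top-level function_call entry, while the fallback output[0]['generated_text'] assumes the usual list-of-dicts response. If the model returns the arguments as a JSON-encoded string rather than a dict (presumably why import json was added, though it is unused in the hunks shown), a decode step would be needed before get_weather(**arguments); a sketch, assuming that string form:

raw_args = output["function_call"].get("arguments", {})
# Assumption: arguments may arrive as a JSON string; decode before unpacking.
arguments = json.loads(raw_args) if isinstance(raw_args, str) else raw_args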