asthaa30 committed on
Commit
4e08515
·
verified ·
1 Parent(s): e2a3966

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +64 -176
app.py CHANGED
@@ -6,182 +6,70 @@ from groq.types.chat.chat_completion_tool_param import ChatCompletionToolParam
6
 
7
  # Use the fine-tuned maritime legal model
8
  MODEL = "nomiChroma3.1"
9
- client = Groq(api_key=os.environ["GROQ_API_KEY"])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
 
11
- # Define your tools if needed (e.g., legal research, document retrieval)
12
- def legal_tool_function(arguments):
13
- # Implement specific legal functions here
14
- # Placeholder for legal research or similar functionality
15
- return {"result": "Legal tool function response here"}
16
-
17
- # Define your tools
18
- legal_tool: ChatCompletionToolParam = {
19
- "type": "function",
20
- "function": {
21
- "name": "legal_tool_function",
22
- "description": "Legal assistant tool: use this for various maritime legal tasks.",
23
- "parameters": {
24
- "type": "object",
25
- "properties": {
26
- "arguments": {
27
- "type": "string",
28
- "description": "Arguments for the legal function.",
29
- },
30
- },
31
- "required": ["arguments"],
32
- },
33
- },
34
- }
35
-
36
- tools = [legal_tool]
37
-
38
- def call_function(tool_call, available_functions):
39
- function_name = tool_call.function.name
40
- if function_name not in available_functions:
41
- return {
42
- "tool_call_id": tool_call.id,
43
- "role": "tool",
44
- "content": f"Function {function_name} does not exist.",
45
- }
46
- function_to_call = available_functions[function_name]
47
- function_args = json.loads(tool_call.function.arguments)
48
- function_response = function_to_call(**function_args)
49
- return {
50
- "tool_call_id": tool_call.id,
51
- "role": "tool",
52
- "name": function_name,
53
- "content": json.dumps(function_response),
54
- }
55
-
56
- def get_model_response(messages, inner_messages, message, system_message):
57
- messages_for_model = []
58
- for msg in messages:
59
- native_messages = msg.get("metadata", {}).get("native_messages", [msg])
60
- if isinstance(native_messages, list):
61
- messages_for_model.extend(native_messages)
62
- else:
63
- messages_for_model.append(native_messages)
64
-
65
- messages_for_model.insert(
66
- 0,
67
- {
68
- "role": "system",
69
- "content": system_message,
70
- },
71
- )
72
- messages_for_model.append(
73
- {
74
- "role": "user",
75
- "content": message,
76
- }
77
- )
78
- messages_for_model.extend(inner_messages)
79
-
80
- try:
81
- response = client.chat.completions.create(
82
- model=MODEL,
83
- messages=messages_for_model,
84
- tools=tools,
85
- temperature=0.5,
86
- top_p=0.65,
87
- max_tokens=4096,
88
- )
89
- return response.choices[0].message
90
- except Exception as e:
91
- print(f"An error occurred while getting model response: {str(e)}")
92
- print(messages_for_model)
93
- return None
94
-
95
- def respond(message, history, system_message):
96
- inner_history = []
97
- available_functions = {
98
- "legal_tool_function": legal_tool_function,
99
- }
100
-
101
- assistant_content = ""
102
- assistant_native_message_list = []
103
-
104
- while True:
105
- response_message = get_model_response(history, inner_history, message, system_message)
106
-
107
- if response_message is None:
108
- return assistant_content, history
109
-
110
- if not response_message.tool_calls and response_message.content is not None:
111
- assistant_content += response_message.content
112
- assistant_native_message_list.append(response_message)
113
- break
114
-
115
- if response_message.tool_calls is not None:
116
- assistant_native_message_list.append(response_message)
117
- inner_history.append(response_message)
118
-
119
- assistant_content += (
120
- "```json\n"
121
- + json.dumps(
122
- [
123
- tool_call.model_dump()
124
- for tool_call in response_message.tool_calls
125
- ],
126
- indent=2,
127
- )
128
- + "\n```\n"
129
- )
130
- assistant_message = {
131
- "role": "assistant",
132
- "content": assistant_content,
133
- "metadata": {"native_messages": assistant_native_message_list},
134
- }
135
-
136
- # Collect responses
137
- response_list = [assistant_message]
138
-
139
- for tool_call in response_message.tool_calls:
140
- function_response = call_function(tool_call, available_functions)
141
- assistant_content += (
142
- "```json\n"
143
- + json.dumps(
144
- {
145
- "name": tool_call.function.name,
146
- "arguments": json.loads(tool_call.function.arguments),
147
- "response": json.loads(function_response["content"]),
148
- },
149
- indent=2,
150
- )
151
- + "\n```\n"
152
- )
153
- native_tool_message = {
154
- "tool_call_id": tool_call.id,
155
- "role": "tool",
156
- "content": function_response["content"],
157
- }
158
- assistant_native_message_list.append(
159
- native_tool_message
160
- )
161
- tool_message = {
162
- "role": "assistant",
163
- "content": assistant_content,
164
- "metadata": {"native_messages": assistant_native_message_list},
165
- }
166
- response_list.append(tool_message)
167
- inner_history.append(native_tool_message)
168
-
169
- return response_list, inner_history
170
-
171
- # Update the system prompt to be more relevant to maritime legal assistance
172
- system_prompt = "You are a maritime legal assistant with expertise in maritime law. Provide detailed legal advice and information based on maritime legal principles and regulations."
173
-
174
- # Use gr.Blocks and gr.Chatbot for Gradio 3.x
175
- with gr.Blocks() as demo:
176
- chatbot = gr.Chatbot()
177
- system_message_input = gr.Textbox(value=system_prompt, label="System message")
178
- message_input = gr.Textbox(label="Message")
179
-
180
- def process_message(message, history, system_message):
181
- responses, updated_history = respond(message, history, system_message)
182
- return responses, updated_history
183
-
184
- message_input.submit(process_message, [message_input, chatbot, system_message_input], [chatbot, chatbot])
185
 
186
  if __name__ == "__main__":
187
- demo.launch()
 
6
 
7
  # Use the fine-tuned maritime legal model
8
  MODEL = "nomiChroma3.1"
9
+ ##client = Groq(api_key=os.environ["GROQ_API_KEY"])
10
+
11
+
12
+ from huggingface_hub import InferenceClient
13
+
14
+ """
15
+ For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
16
+ """
17
+ client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
18
+
19
+
20
+ def respond(
21
+ message,
22
+ history: list[tuple[str, str]],
23
+ system_message,
24
+ max_tokens,
25
+ temperature,
26
+ top_p,
27
+ ):
28
+ messages = [{"role": "system", "content": system_message}]
29
+
30
+ for val in history:
31
+ if val[0]:
32
+ messages.append({"role": "user", "content": val[0]})
33
+ if val[1]:
34
+ messages.append({"role": "assistant", "content": val[1]})
35
+
36
+ messages.append({"role": "user", "content": message})
37
+
38
+ response = ""
39
+
40
+ for message in client.chat_completion(
41
+ messages,
42
+ max_tokens=max_tokens,
43
+ stream=True,
44
+ temperature=temperature,
45
+ top_p=top_p,
46
+ ):
47
+ token = message.choices[0].delta.content
48
+
49
+ response += token
50
+ yield response
51
+
52
+ """
53
+ For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
54
+ """
55
+ demo = gr.ChatInterface(
56
+ respond,
57
+ additional_inputs=[
58
+ gr.Textbox(value="You are a maritime legal assistant with expertise strictly in indian maritime law. Provide detailed legal advice and information based on indian maritime legal principles and regulations.", label="System message"),
59
+ gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
60
+ gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
61
+ gr.Slider(
62
+ minimum=0.1,
63
+ maximum=1.0,
64
+ value=0.95,
65
+ step=0.05,
66
+ label="Top-p (nucleus sampling)",
67
+ ),
68
+ ],
69
+ title="Maritime Legal Compliance",
70
+ description="This chatbot uses the fine tune Llama 3.1 which has the capabilities of responding and helping in legal advices regarding maritime",
71
+ )
72
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
73
 
74
  if __name__ == "__main__":
75
+ demo.launch()