expandme committed on
Commit
f884299
·
1 Parent(s): edd0a86

Lost in Edits

Browse files
Files changed (1) hide show
  1. app.py +15 -8
app.py CHANGED
@@ -38,11 +38,6 @@ def format_promt(message, custom_instructions=None, temperature=0.5):
38
  payload = {
39
  "inputs": {
40
  "messages": []
41
- },
42
- "parameters": {
43
- "temperature": temperature,
44
- "max_new_tokens": 1000,
45
- "stream": True
46
  }
47
  }
48
  if custom_instructions:
@@ -50,6 +45,8 @@ def format_promt(message, custom_instructions=None, temperature=0.5):
50
  payload["inputs"]["messages"].append({"role": "user", "content": message})
51
  return payload
52
 
 
 
53
  def reset_conversation():
54
  '''
55
  Resets Conversation
@@ -72,7 +69,7 @@ selected_model = st.sidebar.selectbox(
72
  • Phi-3.5: Good at technical tasks"""
73
  )
74
 
75
- temp_values = st.sidebar.slider(
76
  'Select a temperature value',
77
  0.0,
78
  1.0,
@@ -138,9 +135,19 @@ if prompt := st.chat_input(f"Hi I'm {selected_model}, How can I help you today?"
138
 
139
  try:
140
  # Use the format_promt function to prepare the request
141
- formatted_request = format_promt(prompt, custom_instructions, temp_values)
142
 
143
- output = client.post(json=formatted_request)
 
 
 
 
 
 
 
 
 
 
144
 
145
  # Create a placeholder for the streaming response
146
  message_placeholder = st.empty()
 
38
  payload = {
39
  "inputs": {
40
  "messages": []
 
 
 
 
 
41
  }
42
  }
43
  if custom_instructions:
 
45
  payload["inputs"]["messages"].append({"role": "user", "content": message})
46
  return payload
47
 
48
+
49
+
50
  def reset_conversation():
51
  '''
52
  Resets Conversation
 
69
  • Phi-3.5: Good at technical tasks"""
70
  )
71
 
72
+ temp_value = st.sidebar.slider(
73
  'Select a temperature value',
74
  0.0,
75
  1.0,
 
135
 
136
  try:
137
  # Use the format_promt function to prepare the request
138
+ formatted_request = format_promt(prompt, custom_instructions, temp_value)
139
 
140
+ output = client.chat.completions.create(
141
+ formated_request,
142
+ temperature=temp_values,#0.5
143
+ max_new_tokens=3000,
144
+ stream=True
145
+ )
146
+
147
+
148
+ client.chat.completions.create
149
+ client.chat.completions.create
150
+
151
 
152
  # Create a placeholder for the streaming response
153
  message_placeholder = st.empty()