warhawkmonk committed
Commit de0d9d3 · verified · 1 Parent(s): 97e078b

Update app.py

Files changed (1)
  1. app.py +22 -1
app.py CHANGED
@@ -23,6 +23,27 @@ from streamlit_modal import Modal
 import streamlit.components.v1 as components
 from datetime import datetime
 
+def consume_llm_api(prompt):
+    """
+    Sends a prompt to the LLM API and processes the streamed response.
+    """
+    url = "https://b8c1-2405-201-802c-f859-29bd-7fc5-dcb-1fe4.ngrok-free.app/api/llm-response"
+    headers = {"Content-Type": "application/json"}
+    payload = {"prompt": prompt}
+
+    try:
+        print("Sending prompt to the LLM API...")
+        with requests.post(url, json=payload, headers=headers, stream=True) as response:
+            response.raise_for_status()
+            print("Response from LLM API:\n")
+            for line in response:
+                yield(line.decode('utf-8'))
+                # print(type(response))
+                # yield(response)
+    except requests.RequestException as e:
+        print(f"Error consuming API: {e}")
+    except Exception as e:
+        print(f"Unexpected error: {e}")
 
 def image_to_base64(image_path):
     with open(image_path, "rb") as img_file:
@@ -149,7 +170,7 @@ with column2:
     if prompts_[-1]=="@working":
         if index==0:
             st.write(prompts_[0].upper())
-            data_need=st.write_stream(llm_text_response()(prompts_[0]))
+            data_need=st.write_stream(consume_llm_api(prompts_[0]))
            dictionary['every_prompt_with_val'][-1]=(prompts_[0],str(data_need))
 
     elif isinstance(prompts_[-1],str):
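
In short, the commit swaps the local llm_text_response() generator for consume_llm_api, which streams the model output over HTTP and hands the chunks to st.write_stream. For reference, below is a minimal sketch of the same pattern, not the committed code: it imports requests explicitly (the diff assumes app.py already imports it), uses iter_content(decode_unicode=True) instead of iterating the raw response object, and reuses the ephemeral ngrok URL from the diff, which will stop resolving once that tunnel closes; the function name and chunk size are assumptions for illustration.

import requests

# Ephemeral ngrok URL copied from the diff; replace with a stable endpoint in practice.
LLM_URL = "https://b8c1-2405-201-802c-f859-29bd-7fc5-dcb-1fe4.ngrok-free.app/api/llm-response"

def stream_llm_response(prompt, url=LLM_URL, chunk_size=512):
    """Yield decoded text chunks from a streaming LLM endpoint."""
    headers = {"Content-Type": "application/json"}
    payload = {"prompt": prompt}
    try:
        with requests.post(url, json=payload, headers=headers, stream=True) as response:
            response.raise_for_status()
            # iter_content with decode_unicode=True does the byte decoding that the
            # committed code performs manually with line.decode('utf-8').
            for chunk in response.iter_content(chunk_size=chunk_size, decode_unicode=True):
                if chunk:  # skip keep-alive chunks
                    yield chunk
    except requests.RequestException as exc:
        # Surface transport errors to the caller as a final chunk instead of printing.
        yield f"\n[stream error: {exc}]"

if __name__ == "__main__":
    # Outside Streamlit the generator can simply be drained to stdout; inside
    # app.py it would be passed to st.write_stream, e.g.
    #     data_need = st.write_stream(stream_llm_response(prompts_[0]))
    for piece in stream_llm_response("Hello"):
        print(piece, end="", flush=True)

Yielding the error text as a final chunk, rather than printing it to the server console, keeps a failure visible in the Streamlit pane that consumes the generator.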