sidthephysicskid committed on
Commit
4c55a7b
·
verified ·
1 Parent(s): d3e7d7f

updated q from url

Browse files
Files changed (1) hide show
  1. app.py +23 -30
app.py CHANGED
@@ -21,52 +21,45 @@ if not SYSTEM_PROMPT:
21
 
22
  client = OpenAI(api_key=OPENAI_API_KEY)
23
 
24
- system_prompt = {
25
- "role": "system",
26
- "content": SYSTEM_PROMPT
27
- }
28
-
29
  MODEL = "gpt-3.5-turbo"
30
 
31
- def predict(message, history, url_params):
32
- # Now includes URL params in function but does not use it directly for OpenAI call
 
 
 
33
  history_openai_format = [system_prompt]
34
- for human, assistant in history:
35
- history_openai_format.append({"role": "user", "content": human})
36
- history_openai_format.append({"role": "assistant", "content": assistant})
37
  history_openai_format.append({"role": "user", "content": message})
38
 
39
  response = client.chat.completions.create(model=MODEL,
40
  messages=history_openai_format,
41
  temperature=1.0,
42
- stream=True)
43
-
44
- partial_message = ""
45
- for chunk in response:
46
- if chunk.choices[0].delta.content:
47
- partial_message += chunk.choices[0].delta.content
48
- yield partial_message
49
-
50
- # JavaScript functions to manipulate URL parameters
51
- get_window_url_params = """
52
- function() {
53
- const params = new URLSearchParams(window.location.search);
54
- return { question: params.get("question") || "" };
55
- }
56
  """
57
 
58
  with gr.Blocks() as app:
59
- url_params = gr.Variable()
60
- history = gr.Variable() # Assuming history management is handled elsewhere or is not needed for first query
61
- message = gr.Textbox(label="Your Question", value="")
62
- message.change(fn=predict, inputs=[message, history, url_params], outputs=message, _js=get_window_url_params)
 
 
63
 
64
- app.load(_js=get_window_url_params) # Load the question from URL on startup
65
 
66
  app.launch(share=True, debug=True)
67
 
68
 
69
-
70
  # V0
71
  # from openai import OpenAI
72
  # import gradio as gr
 
21
 
22
  client = OpenAI(api_key=OPENAI_API_KEY)
23
 
 
 
 
 
 
24
  MODEL = "gpt-3.5-turbo"
25
 
26
def predict(message):
    """Send a single user question to the OpenAI chat API and return the reply.

    Parameters
    ----------
    message : str
        The user's question, sent as a single ``user`` turn after the
        module-level system prompt.

    Returns
    -------
    str
        The assistant's reply text (non-streaming, capped at 150 tokens).
    """
    system_prompt = {
        "role": "system",
        "content": SYSTEM_PROMPT,
    }
    history_openai_format = [system_prompt]
    history_openai_format.append({"role": "user", "content": message})
    response = client.chat.completions.create(
        model=MODEL,
        messages=history_openai_format,
        temperature=1.0,
        max_tokens=150,
        stream=False,
    )
    # openai>=1.0 (the client API used here) returns pydantic objects:
    # `message` is not subscriptable, so message['content'] raises TypeError.
    # Use attribute access instead.
    return response.choices[0].message.content
40
+
41
# JavaScript function to get the question from URL on load
js_on_load = """
function() {
    const params = new URLSearchParams(window.location.search);
    const question = params.get("question") || "Enter your question here";
    return [question];
}
"""

# Build the Gradio UI: a question box + submit button feeding `predict`,
# with the question pre-filled from the page URL's ?question= parameter.
with gr.Blocks() as app:
    with gr.Row():
        question_input = gr.Textbox(label="Your Question", placeholder="Enter your question here")
        submit_button = gr.Button("Submit")
        answer_output = gr.Textbox(label="Answer", interactive=False)

    submit_button.click(fn=predict, inputs=question_input, outputs=answer_output)

    # Load the question from URL on startup. The JS returns [question]; without
    # an `outputs` target that return value is discarded and the textbox is
    # never filled, so wire it explicitly into `question_input`.
    app.load(fn=None, inputs=None, outputs=[question_input], js=js_on_load)

app.launch(share=True, debug=True)
61
 
62
 
 
63
  # V0
64
  # from openai import OpenAI
65
  # import gradio as gr