sidthephysicskid committed on
Commit cc33446 · verified · 1 Parent(s): 4c55a7b

testing url params

Files changed (1)
  1. app.py +67 -58
app.py CHANGED
@@ -1,65 +1,74 @@
- from openai import OpenAI
  import gradio as gr
- import os, json
-
- # Attempt to load configuration from config.json
- try:
-     with open('config.json') as config_file:
-         config = json.load(config_file)
-         OPENAI_API_KEY = config.get("OPENAI_API_KEY")
-         SYSTEM_PROMPT = config.get("SYSTEM_PROMPT")
- except FileNotFoundError:
-     # If config.json is not found, fall back to environment variables
-     OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
-     SYSTEM_PROMPT = os.getenv("SYSTEM_PROMPT")
-
- # Fallback to default values if necessary
- if not OPENAI_API_KEY:
-     raise ValueError("OPENAI_API_KEY is not set in config.json or as an environment variable.")
- if not SYSTEM_PROMPT:
-     SYSTEM_PROMPT = "This is a default system prompt."
-
- client = OpenAI(api_key=OPENAI_API_KEY)
-
- MODEL = "gpt-3.5-turbo"
-
- def predict(message):
-     system_prompt = {
-         "role": "system",
-         "content": SYSTEM_PROMPT
-     }
-     history_openai_format = [system_prompt]
-     history_openai_format.append({"role": "user", "content": message})
-
-     response = client.chat.completions.create(model=MODEL,
-                                                messages=history_openai_format,
-                                                temperature=1.0,
-                                                max_tokens=150,
-                                                stream=False)
-     return response.choices[0].message['content']
-
- # JavaScript function to get the question from URL on load
- js_on_load = """
- function() {
-     const params = new URLSearchParams(window.location.search);
-     const question = params.get("question") || "Enter your question here";
-     return [question];
- }
- """
-
- with gr.Blocks() as app:
-     with gr.Row():
-         question_input = gr.Textbox(label="Your Question", placeholder="Enter your question here")
-         submit_button = gr.Button("Submit")
-     answer_output = gr.Textbox(label="Answer", interactive=False)
-
-     submit_button.click(fn=predict, inputs=question_input, outputs=answer_output)
-
-     app.load(js=js_on_load)  # Load the question from URL on startup

- app.launch(share=True, debug=True)


  # V0
  # from openai import OpenAI
  # import gradio as gr
 
 
  import gradio as gr
+
+ def test(x, request: gr.Request):
+     return request.query_params
+
+ gr.Interface(test, "textbox", "textbox").launch()
+
+
+
+ # from openai import OpenAI
+ # import gradio as gr
+ # import os, json
+
+ # # Attempt to load configuration from config.json
+ # try:
+ #     with open('config.json') as config_file:
+ #         config = json.load(config_file)
+ #         OPENAI_API_KEY = config.get("OPENAI_API_KEY")
+ #         SYSTEM_PROMPT = config.get("SYSTEM_PROMPT")
+ # except FileNotFoundError:
+ #     # If config.json is not found, fall back to environment variables
+ #     OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
+ #     SYSTEM_PROMPT = os.getenv("SYSTEM_PROMPT")
+
+ # # Fallback to default values if necessary
+ # if not OPENAI_API_KEY:
+ #     raise ValueError("OPENAI_API_KEY is not set in config.json or as an environment variable.")
+ # if not SYSTEM_PROMPT:
+ #     SYSTEM_PROMPT = "This is a default system prompt."
+
+ # client = OpenAI(api_key=OPENAI_API_KEY)
+
+ # MODEL = "gpt-3.5-turbo"
+
+ # def predict(message):
+ #     system_prompt = {
+ #         "role": "system",
+ #         "content": SYSTEM_PROMPT
+ #     }
+ #     history_openai_format = [system_prompt]
+ #     history_openai_format.append({"role": "user", "content": message})
+
+ #     response = client.chat.completions.create(model=MODEL,
+ #                                                messages=history_openai_format,
+ #                                                temperature=1.0,
+ #                                                max_tokens=150,
+ #                                                stream=False)
+ #     return response.choices[0].message['content']
+
+ # # JavaScript function to get the question from URL on load
+ # js_on_load = """
+ # function() {
+ #     const params = new URLSearchParams(window.location.search);
+ #     const question = params.get("question") || "Enter your question here";
+ #     return [question];
+ # }
+ # """
+
+ # with gr.Blocks() as app:
+ #     with gr.Row():
+ #         question_input = gr.Textbox(label="Your Question", placeholder="Enter your question here")
+ #         submit_button = gr.Button("Submit")
+ #     answer_output = gr.Textbox(label="Answer", interactive=False)
+
+ #     submit_button.click(fn=predict, inputs=question_input, outputs=answer_output)
+
+ #     app.load(js=js_on_load)  # Load the question from URL on startup

+ # app.launch(share=True, debug=True)

+ ###__________________________________________
  # V0
  # from openai import OpenAI
  # import gradio as gr
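
Note: once this query-param test is validated, the commented-out Blocks app above could read the URL parameter server-side instead of through the js_on_load snippet. A minimal sketch, assuming the parameter is still named "question" and that the installed Gradio version passes gr.Request (with query_params) into a load event; the load_question helper below is hypothetical and not part of this commit:

import gradio as gr

def load_question(request: gr.Request):
    # Hypothetical helper: read the "question" URL parameter on page load,
    # falling back to the same placeholder text the original app used.
    return request.query_params.get("question", "Enter your question here")

with gr.Blocks() as app:
    question_input = gr.Textbox(label="Your Question")
    # Prefill the textbox from the URL, e.g. /?question=What+is+inertia
    app.load(fn=load_question, outputs=question_input)

app.launch()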