mgokg committed (verified)
Commit 40ce5ac · 1 Parent(s): fa566da

Update app.py

Files changed (1):
  app.py  +20 -34
app.py CHANGED
@@ -1,21 +1,11 @@
 import gradio as gr
+import requests
 import os
+import json
 import google.generativeai as genai
-import logging
-import time
-import backoff
-import google.ai.generativelanguage as glm
-
-# Configure Logging
-logging.basicConfig(level=logging.ERROR, format='%(asctime)s - %(levelname)s - %(message)s')
 
 # Load environment variables
-try:
-    genai.configure(api_key=os.environ["geminiapikey"])
-except KeyError:
-    logging.error("Error: 'geminiapikey' environment variable not found.")
-    exit(1)
-
+genai.configure(api_key=os.environ["geminiapikey"])
 read_key = os.environ.get('HF_TOKEN', None)
 
 custom_css = """
@@ -25,14 +15,14 @@ custom_css = """
     background: #202020;
     padding: 20px;
     color: white;
-    border: 1px solid white;
+    border: 1 px solid white;
 }
 """
 
 def predict(prompt):
     # Create the model
     generation_config = {
-        "temperature": 0.7,
+        "temperature": 0.3,
         "top_p": 0.95,
         "top_k": 40,
         "max_output_tokens": 2048,
@@ -40,36 +30,32 @@ def predict(prompt):
     }
 
     model = genai.GenerativeModel(
-        model_name="gemini-1.5-pro",
+        #model_name="gemini-1.5-pro",
+        model_name="gemini-2.0-flash-exp",
         generation_config=generation_config,
     )
 
-    # Create the tools configuration
-
-    tools_config = glm.ToolConfig(
-        function_declarations=[],
-        search_queries=[prompt],
+    chat_session = model.start_chat(
+        history=[
+        ]
     )
-
-    response = model.generate_content(
-        contents=[prompt], # Directly pass the prompt
-        tools=[tools_config]
-    )
-
-    if response and response.text:
-        return response.text
+
+    response = chat_session.send_message(prompt)
+    #response = model.generate_content(contents=prompt, tools='google_search_retrieval')
+    return response.text
 
 # Create the Gradio interface
 with gr.Blocks(css=custom_css) as demo:
     with gr.Row():
-        details_output = gr.Markdown(label="answer", elem_id="md")
-    with gr.Row():
-        ort_input = gr.Textbox(label="prompt", placeholder="ask anything...")
+        details_output = gr.Markdown(label="answer", elem_id="md")
+        #details_output = gr.Textbox(label="Ausgabe", value = f"\n\n\n\n")
     with gr.Row():
-        button = gr.Button("Senden")
+        ort_input = gr.Textbox(label="prompt", placeholder="ask anything...")
+    with gr.Row():
+        button = gr.Button("Senden")
 
     # Connect the button to the function
-    button.click(fn=predict, inputs=ort_input, outputs=details_output)
+    button.click(fn=predict, inputs=ort_input, outputs=details_output)
 
 # Launch the Gradio application
 demo.launch()
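
For reference, a minimal standalone sketch of the chat-session flow the updated predict() now relies on. The prompt string below is illustrative, and the snippet assumes the geminiapikey environment variable is set, as in app.py:

import os
import google.generativeai as genai

# Same configuration path as app.py: read the API key from the environment.
genai.configure(api_key=os.environ["geminiapikey"])

# Build the model as the updated predict() does, then open an empty-history chat session.
model = genai.GenerativeModel(
    model_name="gemini-2.0-flash-exp",
    generation_config={
        "temperature": 0.3,
        "top_p": 0.95,
        "top_k": 40,
        "max_output_tokens": 2048,
    },
)
chat_session = model.start_chat(history=[])

# send_message() returns a response object; .text holds the generated answer.
response = chat_session.send_message("ask anything...")  # illustrative prompt
print(response.text)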