Sujithanumala committed
Commit 9947a95 · verified · 1 Parent(s): dc582ee

Latest App.py

Files changed (1)
  1. app.py +22 -33
app.py CHANGED
@@ -1,38 +1,27 @@
  import gradio as gr
  from typing import List
- from langchain_google_genai import GoogleGenerativeAIEmbeddings
- import google.generativeai as genai
- from langchain_community.vectorstores import FAISS
- from langchain_google_genai import ChatGoogleGenerativeAI
- import re


- genai.configure(api_key="AIzaSyD2o8vjePJb6z8vT_PVe82lVWMD3_cBL0g")

-
- def format_gemini_response(text):
-
-     bold_pattern = r"\*\*(.*?)\*\*"
-     italic_pattern = r"\*(.*?)\*"
-     code_pattern = r"```(.*?)```"
-     text = text.replace('\n', '<br>')
-     formatted_text = re.sub(code_pattern, "<pre><code>\\1</code></pre>", text)
-     formatted_text = re.sub(bold_pattern, "<b>\\1</b>", formatted_text)
-     formatted_text = re.sub(italic_pattern, "<i>\\1</i>", formatted_text)
-
-     return formatted_text
- def predict(message: str, chat_his, d) -> str:
-     model = genai.GenerativeModel("gemini-pro")
-     his = []
-     # for i,j in history:
-     #     his.extend([
-     #         {"role": "user", "parts": i},
-     #         {"role": "model", "parts": j},
-     #     ])
-     chat = model.start_chat(
-         history=his
-     )
-     response = chat.send_message(message)
-     return format_gemini_response(response.text), chat_his, d
- iface = gr.Interface(fn = predict, inputs = ["text","list","json"], outputs = "text")
- iface.launch(debug = True)
 
  import gradio as gr
  from typing import List
+ import json
+ from Classes.Owiki_Class import OWiki

+ def predict(query: str, chat_history: List[tuple[str, str]], invocation_type: str = "OIC", schemas: dict = {}) -> str:
+     with open("src/config.json", 'r') as f:
+         hyperparameters = json.load(f)
+     a = OWiki(**hyperparameters)
+     if invocation_type == "SQL":
+         try:
+             res = a.create_sql_agent(query, schemas)
+         except Exception as e:
+             return e
+     elif invocation_type == "OIC":
+         try:
+             chat = ""
+             for user, bot in chat_history:
+                 chat += f"User: {user} Bot: {bot}\n\n"
+             res = a.search_from_db(query, chat)
+         except Exception as e:
+             return e
+     return res


+ iface = gr.Interface(fn = predict, inputs = ["text", "list", "text", "json"], outputs = "text")
+ iface.launch(debug=True)
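
For reference, a minimal sketch of how the new predict entry point could be exercised outside the Gradio UI. The sample query, chat history, and schemas dict below are illustrative assumptions, not part of the commit; the function itself still depends on src/config.json and the OWiki class shipped in the repository.

# Hypothetical local smoke test for the new predict(); all inputs are made up for illustration.
if __name__ == "__main__":
    sample_history = [("Hi", "Hello! How can I help you today?")]

    # "OIC" path: the (user, bot) history is flattened into one prompt string
    # before OWiki.search_from_db is called.
    print(predict("How do I create a connection?", sample_history, invocation_type="OIC"))

    # "SQL" path: schemas is passed straight to OWiki.create_sql_agent; its exact
    # shape depends on that class, so this dict is only a placeholder.
    print(predict("List all invoices from 2023", [], invocation_type="SQL",
                  schemas={"invoices": ["id", "amount", "created_on"]}))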