Mubbashir Ahmed committed
Commit c95d4c7 · 1 Parent(s): 445701c

checking db conn

Files changed (1)
  1. app.py +70 -3
app.py CHANGED

@@ -1,12 +1,75 @@
 import os
 import gradio as gr
 from huggingface_hub import InferenceClient
+import pyodbc
+# from sqlalchemy import create_engine
 
 # Clients for each model provider
 llama_client = InferenceClient(provider="sambanova", api_key=os.environ["HF_TOKEN"])
 minimax_client = InferenceClient(provider="novita", api_key=os.environ["HF_TOKEN"])
 mistral_client = InferenceClient(provider="together", api_key=os.environ["HF_TOKEN"])
 
+# Global objects
+db_connection = None
+
+def connect_sql_pyodbc():
+    server = os.getenv("SQL_SERVER")
+    database = os.getenv("SQL_DATABASE")
+    username = os.getenv("SQL_USERNAME")
+    password = os.getenv("SQL_PASSWORD")
+
+    conn_str = (
+        f"Driver={{ODBC Driver 17 for SQL Server}};"
+        f"Server={server};"
+        f"Database={database};"
+        f"UID={username};"
+        f"PWD={password};"
+    )
+
+    try:
+        conn = pyodbc.connect(conn_str)
+        print("✅ pyodbc connection successful")
+        return conn
+    except Exception as e:
+        print(f"❌ pyodbc connection failed: {e}")
+        return None
+
+def get_sql_connection():
+    global db_connection
+
+    if db_connection is not None:
+        try:
+            db_connection.cursor()  # test if still open
+            return db_connection
+        except Exception as e:
+            print(f"❌ SQL connection failed: {e}")
+            db_connection = None  # reset if broken
+
+    # Reconnect if needed
+    db_connection = connect_sql_pyodbc()
+    return db_connection
+
+# def connect_sql_sqlalchemy():
+#     server = os.getenv("SQL_SERVER")
+#     database = os.getenv("SQL_DATABASE")
+#     username = os.getenv("SQL_USERNAME")
+#     password = os.getenv("SQL_PASSWORD")
+#     driver = "ODBC+Driver+17+for+SQL+Server"  # URL encoded version
+
+#     conn_url = (
+#         f"mssql+pyodbc://{username}:{password}@{server}/{database}"
+#         f"?driver={driver.replace(' ', '+')}"
+#     )
+
+#     try:
+#         engine = create_engine(conn_url)
+#         conn = engine.connect()
+#         print("✅ SQLAlchemy connection successful")
+#         return conn
+#     except Exception as e:
+#         print(f"❌ SQLAlchemy connection failed: {e}")
+#         return None
+
 # Format chat history for Markdown display
 def format_chat_history(chat_history):
     formatted = ""
@@ -26,10 +89,14 @@ def format_chat_history(chat_history):
 # Main chat handler
 def chat_with_model(model_choice, prompt, image_url, chat_history):
     if not prompt:
-        return "Please enter a text prompt.", chat_history, "", ""
+        return "❌ Please enter a text prompt.", chat_history, "", ""
 
     if chat_history is None:
         chat_history = []
+
+    conn = get_sql_connection()
+    if conn is None:
+        return "❌ Failed to connect to database.", chat_history, "", ""
 
     try:
         # === LLaMA 4 ===
@@ -67,12 +134,12 @@ def chat_with_model(model_choice, prompt, image_url, chat_history):
             chat_history.append({"role": "assistant", "content": bot_msg})
 
         else:
-            return "Unsupported model selected.", chat_history, "", ""
+            return "❌ Unsupported model selected.", chat_history, "", ""
 
         return format_chat_history(chat_history), chat_history, "", ""
 
     except Exception as e:
-        return f"Error: {e}", chat_history
+        return f"❌ Error: {e}", chat_history, "", ""
 
 # Gradio interface
 with gr.Blocks() as demo:
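
Not part of the commit, but a minimal standalone sketch along these lines could verify the credentials and driver locally before launching the Space. It mirrors the connection string built in connect_sql_pyodbc(); the db_check.py file name and the SELECT 1 health-check query are illustrative assumptions, not code from app.py.

# db_check.py: illustrative connectivity check mirroring connect_sql_pyodbc()
# Assumes SQL_SERVER, SQL_DATABASE, SQL_USERNAME, SQL_PASSWORD are set
# and "ODBC Driver 17 for SQL Server" is installed on the host.
import os
import pyodbc

conn_str = (
    f"Driver={{ODBC Driver 17 for SQL Server}};"
    f"Server={os.getenv('SQL_SERVER')};"
    f"Database={os.getenv('SQL_DATABASE')};"
    f"UID={os.getenv('SQL_USERNAME')};"
    f"PWD={os.getenv('SQL_PASSWORD')};"
)

try:
    conn = pyodbc.connect(conn_str)
    row = conn.cursor().execute("SELECT 1").fetchone()  # trivial health check (hypothetical query)
    print("✅ Database reachable, SELECT 1 ->", row[0])
    conn.close()
except Exception as e:
    print(f"❌ Database check failed: {e}")

If the commented-out SQLAlchemy path is ever revived, passing pool_pre_ping=True to create_engine would provide a stale-connection check similar to the one get_sql_connection() performs manually via db_connection.cursor().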