danielle2003 committed
Commit 20f6665 · verified · Parent: 579e76c

Update app.py

Files changed (1):
  1. app.py  +50 -15
app.py CHANGED
@@ -51,7 +51,6 @@ if st.session_state.show_overlay == False:
else:
top= -7
left =-5
-
# Folder to store chat histories
CHAT_DIR = "chat_histories"
os.makedirs(CHAT_DIR, exist_ok=True)
@@ -67,7 +66,7 @@ def save_chat_history():

# Function to load data

- def query_huggingface_model(selected_model: dict, input_data, input_type="text",max_new_tokens=512,task="text-classification"):
+ def query_huggingface_model(selected_model: dict, input_data, input_type="text",max_tokens=512,task="text-classification",temperature=0.7, top_p=0.9 ):
API_URL = selected_model.get("url")
headers = {"Authorization": f"Bearer {HF_TOKEN}"}

@@ -81,7 +80,9 @@ def query_huggingface_model(selected_model: dict, input_data, input_type="text",
"content": input_data
}
],
- "max_tokens": max_new_tokens,
+ "max_tokens": max_tokens,
+ "temperature": temperature,
+ "top_p": top_p,
"model":selected_model.get("model")
}
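Note (illustration, not part of the commit): the hunks above rename max_new_tokens to max_tokens and add temperature and top_p to the request payload. Below is a minimal, self-contained sketch of how that payload could be assembled and sent; the requests.post call, the "user" message role, and the JSON return value are assumptions, since the diff only shows the payload fields, the API URL lookup, and the Authorization header.

```python
import requests

def query_sketch(selected_model: dict, input_data: str, hf_token: str,
                 max_tokens: int = 512, temperature: float = 0.7, top_p: float = 0.9):
    """Hypothetical stand-in mirroring the fields this commit adds to the payload."""
    api_url = selected_model.get("url")
    headers = {"Authorization": f"Bearer {hf_token}"}
    payload = {
        "messages": [
            {"role": "user", "content": input_data}  # role is assumed, not shown in the diff
        ],
        "max_tokens": max_tokens,       # renamed from max_new_tokens in this commit
        "temperature": temperature,     # new sampling control
        "top_p": top_p,                 # new sampling control
        "model": selected_model.get("model"),
    }
    # Assumption: the app POSTs the JSON payload to the model URL and returns the parsed body.
    response = requests.post(api_url, headers=headers, json=payload, timeout=60)
    return response.json()
```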
 
@@ -617,6 +618,7 @@ if st.session_state.framework == "dash":
image3 = 'images.png'
margin = 0
margintop = "-20"
+ display = "none"
st.session_state.models = [
{
"model": "distilbert-base-uncased-finetuned-sst-2-english",
@@ -654,6 +656,8 @@ if st.session_state.framework == "dash":
margin = 0
margintop = -20
task = "text-generation"
+ display = "block"
+
st.session_state.models = [
{
"model": "deepseek-ai/DeepSeek-V3",
@@ -688,6 +692,8 @@ if st.session_state.framework == "dash":
image2 = 'hugging.png'
margintop = -90
image3 = 'Captured.png'
+ display = "none"
+
st.session_state.models = [
{
"model": "Salesforce/blip-image-captioning-large",
@@ -852,11 +858,11 @@ if st.session_state.framework == "dash":
/*border-bottom:none !important;*/
margin-right: 300px !important;
margin-top: 12.5px !important;
- z-index: 11 !important; /* Ensure it stays above other elements */
border-bottom:1px solid #ccc;
- width:64%;
+ width:48%;
height:85px;
- margin-left:520px;
+ margin-left:770px;
+ z-index:1000000;


}}
@@ -868,7 +874,7 @@ if st.session_state.framework == "dash":
border:none;
}}
div[data-testid="stApp"]{{
- background:#f5f3f0;
+ background:white;
height: 98.9vh; /* Full viewport height */
width: 99%;
border-radius: 10px !important;
@@ -887,8 +893,8 @@ if st.session_state.framework == "dash":

}}
section[data-testid="stAppScrollToBottomContainer"]{{
- margin-top:80px !important;
- padding-right:35px !important;
+ margin-top:50px !important;
+ padding-right:5px !important;
}}
div[data-testid="stChatMessageAvatarUser"]{{
display:none;
@@ -950,7 +956,7 @@ if st.session_state.framework == "dash":

}}
[class*="st-key-content_1"] {{
- background: white;
+ background: transparent;
border-radius: 10px;
/* box-shadow: 0px 4px 10px rgba(0, 0, 0, 0.1);*/
width: 90%;
@@ -964,11 +970,11 @@ if st.session_state.framework == "dash":
position: fixed; /* Fix the position of the container */
top: 1%; /* Adjust top offset */
left: 15%; /* Adjust left offset */
- height: 96.5vh; /* Full viewport height */
overflow-x:hidden;
overflow-y:hidden;
display: flex;

+ z-index:10000;

}}

@@ -976,7 +982,7 @@ if st.session_state.framework == "dash":
div[data-testid="stSidebarHeader"]{{
height:40px !important;
position:fixed;
- z-index:100000000000;
+ z-index:10000;
background:#f9f9f9;
width:300px;
}}
@@ -1246,9 +1252,33 @@ if st.session_state.framework == "dash":
padding:15px;
padding-left:100px;

+
+
+ }}
+ div[data-testid="stExpander"]
+ {{
+ width: 250px !important;
+ background-color: white !important;
+ margin-top: -102px !important;
+ position: relative;
+ margin-left: 289px !important;
+ z-index:10000000000000000000;
+ position:fixed;
+ border-bottom:1px solid #ccc;
+ border-radius:0px;
+ height:87px;
+ padding-top:35px;
+ padding-left:10px;
+
=

}}
+ div[data-testid="stExpander"]>details{{
+ z-index:10000000000000000000;
+
+ display:{display} !important;
+ background:white;
+ }}
div[data-testid="stTextInput"]
{{
background-color: transparent; /* White background */
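Note (illustration, not part of the commit): the display flag set per task in the earlier hunks ("block" for text-generation, "none" otherwise) is interpolated into the new div[data-testid="stExpander"]>details rule above, so the Advanced Settings expander is only visible for the text-generation task. A minimal sketch of that pattern follows; injecting the CSS via st.markdown(..., unsafe_allow_html=True) is an assumption, the commit only shows the rule inside the app's CSS f-string.

```python
import streamlit as st

# Hypothetical stand-in for the app's task branching; in app.py the flag is
# set to "block" only in the text-generation branch.
task = "text-generation"
display = "block" if task == "text-generation" else "none"

# Assumption: the CSS f-string is injected with unsafe_allow_html, a common
# Streamlit pattern; the doubled braces escape literal CSS braces.
st.markdown(
    f"""
    <style>
    div[data-testid="stExpander"] > details {{
        display: {display} !important;
    }}
    </style>
    """,
    unsafe_allow_html=True,
)
```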
@@ -1280,7 +1310,7 @@ if st.session_state.framework == "dash":
width:40px !important;
margin-top:{top}% !important;
margin-left:{left}% !important;
- z-index:10000000;
+ z-index:1000000000000;
background-image: url(data:image/png;base64,{base64.b64encode(open("side.png", "rb").read()).decode()});
background-size: contain; /* size of the image */
background-repeat: no-repeat;
@@ -1518,6 +1548,10 @@ if st.session_state.framework == "dash":
model_names = [m["model"] for m in st.session_state.models]
selected_model_name = st.selectbox(task, model_names)
selected_model = next((m for m in st.session_state.models if m["model"] == selected_model_name), None)
+ with st.expander("⚙️ Advanced Settings"):
+ temperature = st.slider("Temperature", 0.0, 2.0, 0.7, step=0.1)
+ top_p = st.slider("Top-p ", 0.0, 1.0, 0.9, step=0.05)
+ max_tokens = st.slider("Max tokens", 50, 1024, 512, step=50)
if st.session_state.show_overlay == True:
st.header(f"{text_h1} – What can I help you with?")
user_input = ''
@@ -1699,7 +1733,8 @@ if st.session_state.framework == "dash":
selected_model,
st.session_state.input_text,
input_type=st.session_state.input_task,
- task=task
+ task=task,
+ temperature=temperature,top_p=top_p,max_tokens=max_tokens
)

st.session_state.messages.append({
@@ -1790,7 +1825,7 @@ if st.session_state.framework == "dash":
with st.chat_message("assistant"):
with st.spinner("Model is generating a response..."):
st.session_state.messages.append({"role": "user", "content": prompt,"image":""})
- result = query_huggingface_model(selected_model, prompt , input_type="text",task=task)
+ result = query_huggingface_model(selected_model, prompt , input_type="text",task=task,temperature=temperature,top_p=top_p,max_tokens=max_tokens)
st.markdown(f"**Model:** `{selected_model['model'] if isinstance(selected_model, dict) else selected_model}`")
response = extract_response_content(result)
st.write_stream(generate_stream_response(response)) # Add assistant response to chat history
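Note (illustration, not part of the commit): the new sliders are created inside the expander, but a Python with block does not introduce a new scope, so temperature, top_p and max_tokens are still defined when the chat handlers later in the same rerun call query_huggingface_model. A self-contained sketch of that wiring, with a hypothetical stub standing in for the real query function:

```python
import streamlit as st

def fake_query(prompt: str, **params):
    """Hypothetical stub standing in for query_huggingface_model."""
    return {"echo": prompt, "params": params}

# Same widgets the commit adds; the values remain usable after the `with` block ends.
with st.expander("⚙️ Advanced Settings"):
    temperature = st.slider("Temperature", 0.0, 2.0, 0.7, step=0.1)
    top_p = st.slider("Top-p", 0.0, 1.0, 0.9, step=0.05)
    max_tokens = st.slider("Max tokens", 50, 1024, 512, step=50)

prompt = st.chat_input("Ask something...")
if prompt:
    st.write(fake_query(prompt, temperature=temperature, top_p=top_p,
                        max_tokens=max_tokens))
```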