ChrisNguyenAI committed on
Commit 794a015 · 1 Parent(s): d846477

add more models

.gitignore ADDED
@@ -0,0 +1 @@
+ /myvenv
.gradio/certificate.pem ADDED
@@ -0,0 +1,31 @@
+ -----BEGIN CERTIFICATE-----
+ MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+ TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+ cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+ WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+ MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+ h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+ 0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+ A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+ T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+ B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+ B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+ KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+ OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+ jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+ qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+ rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+ HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+ hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+ 3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+ NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+ TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+ jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+ oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+ 4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+ mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+ emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+ -----END CERTIFICATE-----
__pycache__/models.cpython-313.pyc ADDED
Binary file (2.22 kB).
 
app.py CHANGED
@@ -1,9 +1,16 @@
  from models import ModelChain
  import gradio as gr
 
- DEEPSEEK_MODEL = "deepseek/deepseek-r1:free"
- GEMINI_MODEL = "google/gemini-2.0-flash-exp:free"
+ DEEPSEEK_R1_MODEL = "deepseek/deepseek-r1:free"
+ GEMINI_2ZERO_MODEL = "google/gemini-2.0-flash-exp:free"
  QWEN_MODEL="qwen/qwen2.5-vl-72b-instruct:free"
+ DOLPHIN_MODEL = "cognitivecomputations/dolphin3.0-r1-mistral-24b:free"
+ MISTRAL_MODEL = "mistralai/mistral-small-24b-instruct-2501:free"
+ ROGUE_MODEL = "sophosympatheia/rogue-rose-103b-v0.2:free"
+ DEEPSEEK_V3_MODEL = "deepseek/deepseek-chat:free"
+ META_LLAMA_MODEL = "meta-llama/llama-3.3-70b-instruct:free"
+ GEMINI_1DOT5_MODEL="google/gemini-flash-1.5-8b"
+ MYTHOMAX_L2_13B_MODEL ="gryphe/mythomax-l2-13b"
 
  def get_models_response(models,user_input,system_prompt):
  if len(models) >1:
@@ -12,18 +19,32 @@ def get_models_response(models,user_input,system_prompt):
  print(f"Reponse using model {models}")
  chain = ModelChain()
  if models[0]=="deepseek-r1":
- return chain.get_model_response(DEEPSEEK_MODEL,user_input,system_prompt)
+ return chain.get_model_response(DEEPSEEK_R1_MODEL,user_input,system_prompt)
  elif models[0]=="gemini-2.0-flash-exp":
- return chain.get_model_response(GEMINI_MODEL,user_input,system_prompt)
+ return chain.get_model_response(GEMINI_2ZERO_MODEL,user_input,system_prompt)
  elif models[0]=="qwen2.5-vl-72b-instruct":
  return chain.get_model_response(QWEN_MODEL,user_input,system_prompt)
+ elif models[0] =="dolphin3.0-r1-mistral-24b":
+ return chain.get_model_response(DOLPHIN_MODEL,user_input,system_prompt)
+ elif models[0] == "mistral-small-24b-instruct-2501":
+ return chain.get_model_response(MISTRAL_MODEL,user_input,system_prompt)
+ elif models[0] == "rogue-rose-103b-v0.2":
+ return chain.get_model_response(ROGUE_MODEL,user_input,system_prompt)
+ elif models[0] == "deepseek-chat":
+ return chain.get_model_response(DEEPSEEK_V3_MODEL,user_input,system_prompt)
+ elif models[0] == "llama-3.3-70b-instruct":
+ return chain.get_model_response(META_LLAMA_MODEL,user_input,system_prompt)
+ elif models[0] == "gemini-flash-1.5-8b":
+ return chain.get_model_response(GEMINI_1DOT5_MODEL,user_input,system_prompt)
+ elif models[0] == "mythomax-l2-13b":
+ return chain.get_model_response(MYTHOMAX_L2_13B_MODEL,user_input,system_prompt)
  else:
  return "Current Unsupported"
 
  def main():
  view = gr.Interface(
  fn= get_models_response,
- inputs = [gr.CheckboxGroup(["gemini-2.0-flash-exp","deepseek-r1","qwen2.5-vl-72b-instruct"], label = "Response model", value = "deepseek-r1"),gr.Textbox(label = "Your input",lines = 10, placeholder = "Nhập nội dung"), gr.Textbox(label = "Nhiệm vụ của Bot", placeholder = "Vd: bạn là một chuyên gia thương mại điện tử 10 năm kinh nghiệm hãy giúp tôi trả lời các câu hỏi sau")],
+ inputs = [gr.CheckboxGroup(["gemini-2.0-flash-exp","deepseek-r1","qwen2.5-vl-72b-instruct","dolphin3.0-r1-mistral-24b","mistral-small-24b-instruct-2501","rogue-rose-103b-v0.2","deepseek-chat","llama-3.3-70b-instruct","gemini-flash-1.5-8b","mythomax-l2-13b"], label = "Response model", value = "deepseek-r1"),gr.Textbox(label = "Your input",lines = 10, placeholder = "Nhập nội dung"), gr.Textbox(label = "Nhiệm vụ của Bot", placeholder = "Vd: bạn là một chuyên gia thương mại điện tử 10 năm kinh nghiệm hãy giúp tôi trả lời các câu hỏi sau")],
  outputs = gr.Textbox(label ="Output", lines = 26),
  flagging_mode = "never",
  stop_btn = gr.Button("Stop",variant = "stop",visible = True),
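
Note: app.py calls ModelChain().get_model_response(model_id, user_input, system_prompt) with OpenRouter-style model IDs (the ":free" suffixes), but models.py itself is not part of this diff; only its compiled __pycache__ entry appears above. The following is a minimal sketch of what that class presumably does, assuming it forwards the selected model ID to OpenRouter's OpenAI-compatible chat-completions endpoint. Only the class name and method signature are taken from app.py; the openai client usage and the OPENROUTER_API_KEY variable are assumptions.

# Hypothetical sketch of models.py -- not shown in this commit.
import os
from openai import OpenAI

class ModelChain:
    def __init__(self):
        # OpenRouter exposes an OpenAI-compatible endpoint; the env var name is an assumption.
        self.client = OpenAI(
            base_url="https://openrouter.ai/api/v1",
            api_key=os.environ["OPENROUTER_API_KEY"],
        )

    def get_model_response(self, model, user_input, system_prompt):
        # Send one system message plus one user message to the selected model and return the text.
        completion = self.client.chat.completions.create(
            model=model,  # e.g. "deepseek/deepseek-r1:free"
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": user_input},
            ],
        )
        return completion.choices[0].message.content

Since every new model currently requires both a constant and an elif branch, a dict mapping the checkbox labels to model IDs would reduce future additions to one line each, though that refactor is not part of this commit.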