kjozsa committed on
Commit 735bc92
Parent: c30cfb8

use dynamic models list

Files changed (1): app.py (+4, -11)
app.py CHANGED
@@ -1,17 +1,9 @@
 import streamlit as st
-from ollama import chat
+import ollama
 from loguru import logger
 import re
 
-available_models = [
-    'openhermes',
-    'deepseek-coder',
-    'deepseek-coder:6.7b',
-    'falcon:7b',
-    'mistral:7b',
-    'phi',
-    'starling-lm'
-]
+available_models = sorted([x['model'] for x in ollama.list()['models']], key=lambda x: (not x.startswith("openhermes"), x))
 
 
 def ask(model, system_prompt, pre_prompt, question):
@@ -26,7 +18,7 @@ def ask(model, system_prompt, pre_prompt, question):
         },
     ]
     logger.debug(f"<< {model} << {question}")
-    response = chat(model=model, messages=messages)
+    response = ollama.chat(model=model, messages=messages)
     answer = response['message']['content']
     logger.debug(f">> {model} >> {answer}")
     return answer
@@ -81,6 +73,7 @@ def main():
     def target(question):
         return re.split(r'\s|,|:', question.strip())[0].strip()
 
+
     def sanitize(question):
         return re.sub(r"\([^)]*\)", "", question)
 
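
The new available_models line reads each entry's 'model' name from ollama.list()['models'] and sorts them with the key (not x.startswith("openhermes"), x), which puts "openhermes*" models first (False sorts before True) and orders each group alphabetically. A minimal sketch of that sort behavior, using a hypothetical model list so it runs without an Ollama server:

# Sketch only: reproduces the commit's sort key on a made-up list of model names.
models = ['mistral:7b', 'openhermes:latest', 'deepseek-coder:6.7b', 'phi']

ordered = sorted(models, key=lambda x: (not x.startswith("openhermes"), x))
# False (0) sorts before True (1), so "openhermes*" entries come first,
# then the remaining models fall back to plain alphabetical order.
print(ordered)
# ['openhermes:latest', 'deepseek-coder:6.7b', 'mistral:7b', 'phi']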