from loguru import logger
import ollama


def models():
    """Return locally available model names, listing "openhermes" models first."""
    return sorted(
        [x['model'] for x in ollama.list()['models']],
        # Sort key: names starting with "openhermes" sort first, then alphabetically.
        key=lambda name: (not name.startswith("openhermes"), name),
    )


def ask(model, system_prompt, pre_prompt, question):
    """Send a single question to `model` via ollama.chat and return the reply text."""
    messages = [
        {
            'role': 'system',
            'content': f"{system_prompt} {pre_prompt}",
        },
        {
            'role': 'user',
            'content': f"{question}",
        },
    ]
    logger.debug(f"<< {model} << {question}")
    response = ollama.chat(model=model, messages=messages)
    answer = response['message']['content']
    logger.debug(f">> {model} >> {answer}")
    return answer
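

# Minimal usage sketch (not part of the original module): shows how the two
# helpers above might be combined. The prompts and the choice of the first
# listed model are illustrative assumptions, not values from this repository.
if __name__ == "__main__":
    available = models()
    logger.info(f"available models: {available}")
    if available:
        reply = ask(
            model=available[0],
            system_prompt="You are a concise assistant.",
            pre_prompt="Answer in one sentence.",
            question="What is the capital of France?",
        )
        print(reply)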