Research-Assistant / agent/toolkits.py
from agent import prompts, llm_utils
from config import Config

# Module-level configuration; supplies the model name used below (CFG.fast_llm_model).
CFG = Config()


def english_polishing(content):
    """Polish the English of `content`, streaming the revised text from the LLM."""
    prompt = prompts.generate_english_polishing_prompt(content)
    messages = [{
        "role": "user",
        "content": prompt,
    }]
    # Stream the model's response so the caller can display partial output as it arrives.
    yield from llm_utils.llm_stream_response(
        model=CFG.fast_llm_model,
        messages=messages)
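

# Usage sketch (not part of the original module): english_polishing is a generator
# that re-yields whatever llm_utils.llm_stream_response produces, so callers iterate
# over it and forward each partial response (e.g. to a UI output). The exact shape of
# each chunk (accumulated text vs. delta) depends on llm_stream_response; here it is
# assumed to yield printable strings.
if __name__ == "__main__":
    for chunk in english_polishing("Me and him goes to the library yesterday."):
        print(chunk, end="", flush=True)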