from langchain_community.chat_models import ChatOllama
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate

# Local Llama 3 served by Ollama
llm = ChatOllama(
    model="llama3",
    keep_alive=-1,      # keep the model loaded in memory between calls
    temperature=0,
    num_predict=512)    # cap the response length (Ollama's equivalent of max_new_tokens)
# You can change the template to control the generated text
prompt = ChatPromptTemplate.from_template(
    "Write me a 100 word article on {topic} from the perspective of a {profession}.")
# Chain the prompt, model, and string output parser, then stream the result
chain = prompt | llm | StrOutputParser()
for chunk in chain.stream({"topic": "LLMs", "profession": "labor"}):
    print(chunk, end="", flush=True)
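
If streaming is not needed, the same chain can return the whole article in a single call with invoke(). A minimal sketch, assuming the chain defined above (the topic and profession values here are only illustrative):

# Non-streaming alternative: get the complete response as one string
result = chain.invoke({"topic": "LLMs", "profession": "teacher"})
print(result)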