Spaces:
Sleeping
Sleeping
File size: 608 Bytes
cfd95d6 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 |
from langchain_community.chat_models import ChatOllama
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
# Local Llama3 served via Ollama.
# keep_alive=-1 keeps the model resident in memory between calls;
# temperature=0 makes the generation deterministic.
llm = ChatOllama(
    model="llama3",
    keep_alive=-1,
    temperature=0,
    max_new_tokens=512,
)

# You can change the template below to generate different text.
prompt = ChatPromptTemplate.from_template(
    "Write me a 100 word article on {topic} from the perspective of a {profession}. "
)

# LCEL pipeline: fill the prompt template, run the model,
# then parse the model message down to a plain string.
chain = prompt | llm | StrOutputParser()

# Stream tokens to stdout as they arrive instead of waiting
# for the full completion; flush so each chunk shows immediately.
for chunk in chain.stream({"topic": "LLMs", "profession": "labor"}):
    print(chunk, end="", flush=True)