# https://chat.lmsys.org/?leaderboard
import langchain
# https://huggingface.co/spaces/joyson072/LLm-Langchain/blob/main/app.py
from langchain.llms import HuggingFaceHub
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

# https://colab.research.google.com/drive/1hrS6_g14EcOD4ezwSGlGX2zxJegX5uNX#scrollTo=NUwUR9U7qkld
# Requires a Hugging Face API token (HUGGINGFACEHUB_API_TOKEN environment variable).
llm_hf = HuggingFaceHub(
    repo_id="OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5",
    model_kwargs={"temperature": 0.9},
)

# A throwaway question whose answer gives us some text to extract facts from.
text = "Why did the chicken cross the road?"

output_question_1 = llm_hf(text)
print(output_question_1)



###
## FACT EXTRACTION
###

# Prompt that asks the model for short, numbered, opinion-free facts.
fact_extraction_prompt = PromptTemplate(
    input_variables=["text_input"],
    template="Extract the key facts out of this text. Don't include opinions. Give each fact a number and keep them short sentences:\n\n {text_input}"
)

fact_extraction_chain = LLMChain(llm=llm_hf, prompt=fact_extraction_prompt)

# Extract facts from the original question plus the model's answer.
facts = fact_extraction_chain.run(text + " " + output_question_1)

print(facts)