# Streamlit question-answering app: LangChain + Hugging Face Inference Endpoint.
# Standard library
import os

# Third-party
import streamlit as st
from dotenv import load_dotenv
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_community.llms import HuggingFaceEndpoint

# Pull HUGGINGFACEHUB_API_TOKEN (and any other settings) from a local .env file.
load_dotenv()
# def qabot(question): | |
# llm_hugginface = HuggingFaceEndpoint(repo_id='google/flan-t5-large',token=os.getenv("HUGGINGFACEHUB_API_TOKEN"),temperature=0.5,max_length=128) | |
# result= llm_hugginface("Can you write me the capital of {question}") | |
# return result | |
# ans =qabot("india") | |
# print(ans) | |
# question = "Who won the FIFA World Cup in the year 1994? " | |
def qabot(question):
    """Answer *question* using a chain-of-thought prompt against a HF endpoint.

    Parameters
    ----------
    question : str
        The user's question; substituted into the prompt template.

    Returns
    -------
    str
        The model's generated answer text.
    """
    template = """Question: {question}
    Answer: Let's think step by step."""
    prompt = PromptTemplate.from_template(template)
    repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
    # `max_length` / `token` are not HuggingFaceEndpoint parameters; the
    # supported names are `max_new_tokens` and `huggingfacehub_api_token`.
    llm = HuggingFaceEndpoint(
        repo_id=repo_id,
        max_new_tokens=128,
        temperature=0.5,
        huggingfacehub_api_token=os.getenv("HUGGINGFACEHUB_API_TOKEN"),
    )
    llm_chain = LLMChain(prompt=prompt, llm=llm)
    return llm_chain.run(question)
# print(qabot("Who won the FIFA World Cup in the year 1994? ")) | |
st.header("Langchain Application")

# `input` shadowed the builtin in the original; use a descriptive name.
user_question = st.text_input("Input: ", key="input")
submit = st.button("Ask the question")

## If ask button is clicked
if submit:
    # Only hit the model endpoint after the button is pressed — the original
    # called qabot() on every Streamlit rerun, wasting an API call per keystroke.
    response = qabot(user_question)
    st.subheader("The Response is")
    st.write(response)