import os
import gradio as gr
from langchain_core.prompts import ChatPromptTemplate
from langchain_huggingface import HuggingFaceEndpoint
from langchain_core.output_parsers import StrOutputParser
# Read the required API keys from the environment and fail fast if either is missing
langchain_key = os.getenv("LANGCHAIN_API_KEY")
HF_key = os.getenv("HUGGINGFACEHUB_TOKEN")
if langchain_key is None or HF_key is None:
raise ValueError("API keys are not set. Please set LANGCHAIN_API_KEY and HUGGINGFACEHUB_TOKEN.")
os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ["LANGCHAIN_API_KEY"] = langchain_key
os.environ['HUGGINGFACEHUB_TOKEN'] = HF_key
# 1. Prompt Template
system_template = "Translate the following into {language}:"
human_template = "{text}"
# Use (role, template) tuples so that {language} and {text} are treated as
# template variables; plain SystemMessage/HumanMessage objects would be passed
# through literally, without any substitution.
prompt_template = ChatPromptTemplate.from_messages([
    ("system", system_template),
    ("human", human_template)
])
# 2. Create model
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.3",
    # Mistral-7B-Instruct is served as a text-generation model, not a dedicated
    # translation model, so translation is done via the prompt above.
    task="text-generation",
    max_new_tokens=150,
    do_sample=False,
    huggingfacehub_api_token=HF_key
)
# 3. Create parser
parser = StrOutputParser()
# 4. Create chain
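# The | operator composes the steps into a single runnable: the prompt fills in
# {language} and {text}, the model generates a completion, and the parser
# returns it as a plain string.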
chain = prompt_template | llm | parser
def translate(text, language):
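    """Run the prompt -> model -> parser chain for one translation request."""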
input_data = {
"text": text,
"language": language
}
# Use the .invoke() method to properly run the chain
result = chain.invoke(input_data)
return result
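# Example call: translate("Good morning", "French") returns the model's French
# rendering of the input as a plain string.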
# Gradio interface
iface = gr.Interface(
fn=translate,
inputs=[
gr.Textbox(lines=2, placeholder="Enter text to translate"),
gr.Textbox(lines=1, placeholder="Enter target language")
],
outputs="text",
title="LangChain Translation Service",
description="Translate text using LangChain and Hugging Face."
)
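# Launch the web UI when this script is executed directly.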
if __name__ == "__main__":
iface.launch()