|
import os |
|
import gradio as gr |
|
from langchain_core.prompts import ChatPromptTemplate |
|
from langchain_core.messages import HumanMessage, SystemMessage |
|
from langchain_huggingface import HuggingFaceEndpoint |
|
from langchain_core.output_parsers import StrOutputParser |
|
|
|
|
|
# Pull both credentials from the environment; fail fast if either is absent.
langchain_key = os.getenv("LANGCHAIN_API_KEY")
HF_key = os.getenv("HUGGINGFACEHUB_TOKEN")

if any(key is None for key in (langchain_key, HF_key)):
    raise ValueError("API keys are not set. Please set LANGCHAIN_API_KEY and HUGGINGFACEHUB_TOKEN.")

# Enable LangSmith tracing and re-export both credentials so that the
# LangChain / Hugging Face client libraries can pick them up.
os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ["LANGCHAIN_API_KEY"] = langchain_key
os.environ["HUGGINGFACEHUB_TOKEN"] = HF_key
|
|
|
|
|
# Chat prompt: a parameterized system instruction plus the user's raw text.
# The placeholders match the keys supplied by translate():
# {source_language}, {target_language} and {text}.
system_template = "Translate the following from {source_language} into {target_language}:"

human_template = "{text}"

# NOTE: (role, template) tuples are required here. Passing SystemMessage /
# HumanMessage instances (as the previous version did) makes from_messages
# treat the strings as *literal* message content, so the {…} placeholders
# were never interpolated at invoke time.
prompt_template = ChatPromptTemplate.from_messages([
    ("system", system_template),
    ("human", human_template),
])
|
|
|
|
|
# Hugging Face Inference endpoint for the instruct model.
# Mistral-7B-Instruct-v0.3 is served as a *text-generation* model; the
# previous task="translation" targets a seq2seq pipeline this repo does not
# provide and is rejected by the endpoint.
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.3",
    task="text-generation",
    max_new_tokens=150,
    do_sample=False,  # greedy decoding: deterministic, reproducible output
    token=HF_key
)
|
|
|
|
|
# Collapse the chat-model output down to a plain string.
parser = StrOutputParser()

# Runnable pipeline: prompt -> LLM -> string. .pipe() is the method form of
# the | composition operator and builds the identical RunnableSequence.
chain = prompt_template.pipe(llm).pipe(parser)
|
|
|
def translate(text, target_language):
    """Translate ``text`` from English into ``target_language`` via the chain.

    Returns the model output with any echoed "Assistant: " prefix removed.
    """
    payload = {
        "source_language": "English",  # source side is fixed to English
        "target_language": target_language,
        "text": text,
    }

    raw = chain.invoke(payload)

    # If the model echoed an "Assistant: " marker, keep only what follows the
    # last occurrence; when the marker is absent, split() returns the whole
    # string unchanged, so this covers both cases.
    return raw.split("Assistant: ")[-1].strip()
|
|
|
|
|
# Web UI: two text inputs (source text, target language) feeding translate().
source_box = gr.Textbox(lines=2, placeholder="Enter text to translate")
language_box = gr.Textbox(lines=1, placeholder="Enter target language")

iface = gr.Interface(
    fn=translate,
    inputs=[source_box, language_box],
    outputs="text",
    title="LangChain Translation Service",
    description="Translate text using LangChain and Hugging Face.",
)

# Start the Gradio server only when run as a script, not on import.
if __name__ == "__main__":
    iface.launch()
|
|