import os
import gradio as gr
from langchain_core.prompts import ChatPromptTemplate
from langchain_huggingface import HuggingFaceEndpoint
from langchain_core.output_parsers import StrOutputParser
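
# If the keys live in a .env file, they could be loaded here with python-dotenv before
# the os.getenv calls below (optional; assumes the python-dotenv package is installed):
#   from dotenv import load_dotenv
#   load_dotenv()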

# Read the required API keys from the environment and fail fast if either is missing
langchain_key = os.getenv("LANGCHAIN_API_KEY")
HF_key = os.getenv("HUGGINGFACEHUB_TOKEN")

if langchain_key is None or HF_key is None:
    raise ValueError("API keys are not set. Please set LANGCHAIN_API_KEY and HUGGINGFACEHUB_TOKEN.")

os.environ["LANGCHAIN_TRACING_V2"] = "true"
os.environ["LANGCHAIN_API_KEY"] = langchain_key
os.environ['HUGGINGFACEHUB_TOKEN'] = HF_key

# 1. Prompt template
system_template = "Translate the following from {source_language} into {target_language}:"
human_template = "{text}"

# Use (role, template) tuples so the placeholders are filled in at invoke time;
# literal SystemMessage/HumanMessage objects would be passed through unformatted.
prompt_template = ChatPromptTemplate.from_messages([
    ("system", system_template),
    ("human", human_template)
])

# 2. Create model
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.3",
    task="text-generation",  # Mistral-7B-Instruct is served as a text-generation model
    max_new_tokens=150,
    do_sample=False,
    huggingfacehub_api_token=HF_key
)

# 3. Create parser
parser = StrOutputParser()

# 4. Create chain
chain = prompt_template | llm | parser
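# Quick sanity check of the chain (illustrative values; mirrors what translate() builds below):
#   chain.invoke({"source_language": "English", "target_language": "French", "text": "Hello"})
# returns the model's completion as a plain string via StrOutputParser.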

def translate(text, target_language):
    source_language = "English"  # Assuming the source language is English
    input_data = {
        "source_language": source_language,
        "target_language": target_language,
        "text": text
    }
    # Run the chain; .invoke() returns the parsed string output
    result = chain.invoke(input_data)
    
    # Extract the translation from the result
    if "Assistant: " in result:
        translation = result.split("Assistant: ")[-1].strip()
    else:
        translation = result.strip()
    
    return translation

# Gradio interface
iface = gr.Interface(
    fn=translate,
    inputs=[
        gr.Textbox(lines=2, placeholder="Enter text to translate"),
        gr.Textbox(lines=1, placeholder="Enter target language")
    ],
    outputs="text",
    title="LangChain Translation Service",
    description="Translate text using LangChain and Hugging Face."
)
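
# To run locally, export LANGCHAIN_API_KEY and HUGGINGFACEHUB_TOKEN before launching this script.
# launch(share=True) could be passed below to also expose a temporary public Gradio URL.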

if __name__ == "__main__":
    iface.launch()