datacipen committed on
Commit
e937da8
·
verified ·
1 Parent(s): ee9a184

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +20 -5
main.py CHANGED
@@ -8,11 +8,21 @@ import uuid
8
  import langsmith as ls
9
  from pathlib import Path
10
  import chainlit as cl
11
- from mistralai.client import MistralClient
12
- from mistralai import Mistral, UserMessage, SystemMessage
 
 
 
 
 
 
 
 
13
  from offres_emploi import Api
14
  from offres_emploi.utils import dt_to_str_iso
15
- os.environ["GITHUB_TOKEN"] = os.environ["GITHUB_TOKEN"]
 
 
16
  session_id = str(uuid.uuid4())
17
 
18
  @cl.step(type="tool", show_input=True)
@@ -109,8 +119,13 @@ def API_France_Travail_Metier(metier):
109
 
110
  @cl.step(type="llm", show_input=True)
111
  def Connexion_Mistral():
112
- endpoint = "https://models.inference.ai.azure.com"
113
- return Mistral(api_key=os.environ["GITHUB_TOKEN"], server_url=endpoint)
 
 
 
 
 
114
 
115
  @cl.step(type="tool", show_input=True)
116
  @ls.traceable(run_type="llm", name="Connexion à Mistral : paramétrages de la conversation")
 
8
  import langsmith as ls
9
  from pathlib import Path
10
  import chainlit as cl
11
+ #from mistralai.client import MistralClient
12
+ #from mistralai import Mistral, UserMessage, SystemMessage
13
+
14
+ from operator import itemgetter
15
+ from langchain_huggingface import HuggingFaceEndpoint
16
+ from langchain.schema.runnable import Runnable, RunnablePassthrough, RunnableLambda
17
+ from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
18
+ from langchain.memory import ChatMessageHistory, ConversationBufferMemory
19
+ from langchain.schema.runnable.config import RunnableConfig
20
+
21
  from offres_emploi import Api
22
  from offres_emploi.utils import dt_to_str_iso
23
+ #os.environ["GITHUB_TOKEN"] = os.environ["GITHUB_TOKEN"]
24
+ os.environ['HUGGINGFACEHUB_API_TOKEN'] = os.environ['HUGGINGFACEHUB_API_TOKEN']
25
+
26
  session_id = str(uuid.uuid4())
27
 
28
  @cl.step(type="tool", show_input=True)
 
119
 
120
@cl.step(type="llm", show_input=True)
def Connexion_Mistral():
    """Build and return the LLM client used by the app.

    Replaces the former Azure-hosted Mistral client with a streaming
    HuggingFace inference endpoint for Mistral-7B-Instruct-v0.2.
    Authentication relies on the HUGGINGFACEHUB_API_TOKEN environment
    variable set earlier in the file.

    Returns:
        HuggingFaceEndpoint: a streaming endpoint configured with
        max_new_tokens=5300 and temperature=0.5.
    """
    repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
    # NOTE(review): task="text2text-generation" is unusual for a causal
    # instruct model — "text-generation" is the typical task for Mistral;
    # confirm against the HuggingFaceEndpoint documentation before changing.
    return HuggingFaceEndpoint(
        repo_id=repo_id,
        max_new_tokens=5300,
        temperature=0.5,
        task="text2text-generation",
        streaming=True,
    )
129
 
130
  @cl.step(type="tool", show_input=True)
131
  @ls.traceable(run_type="llm", name="Connexion à Mistral : paramétrages de la conversation")