Neurolingua committed on
Commit bbe5a2f · verified · 1 Parent(s): 9bdc6ad

Update other_function.py

Files changed (1): other_function.py (+18 −13)
other_function.py CHANGED
@@ -1,5 +1,7 @@
 import os
 from bs4 import BeautifulSoup
+import os
+from mistralai import Mistral
 import requests
 from requests.auth import HTTPBasicAuth
 from PIL import Image
@@ -17,6 +19,11 @@ from ai71 import AI71
 import os
 import PyPDF2
 import pandas as pd
+model = "mistral-large-latest"
+api_key='xQ2Zhfsp4cLar4lvBRDWZKljvp0Ej427'
+
+client = Mistral(api_key=api_key)
+
 def extract_text_from_image(image_path):
     img = cv2.imread(image_path)
     if img is None:
@@ -35,19 +42,17 @@ if not os.path.exists(UPLOAD_FOLDER):
 pdf_text=''
 AI71_API_KEY = os.environ.get('AI71_API_KEY')
 def generate_response(query,chat_history):
-    response = ''
-    for chunk in AI71(AI71_API_KEY).chat.completions.create(
-        model="tiiuae/falcon-180b-chat",
-        messages=[
-            {"role": "system", "content": "You are a best agricultural assistant.Remember to give response not more than 2 sentence.Greet the user if user greets you."},
-            {"role": "user",
-             "content": f'''Answer the query based on history {chat_history}:{query}'''},
-        ],
-        stream=True,
-    ):
-        if chunk.choices[0].delta.content:
-            response += chunk.choices[0].delta.content
-    return response.replace("###", '').replace('\nUser:','')
+
+    chat_response = client.chat.complete(
+        model= model,
+        messages = [
+            {
+                "role": "user",
+                "content": f"{User_querry}? provide response within 2 sentence",
+            },
+        ]
+    )
+    return chat_response.choices[0].message.content
 class ConversationBufferMemory:
     def __init__(self, max_size):
         self.memory = []
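
Note on the new generate_response: as committed, its body interpolates User_querry, a name that does not appear anywhere else in the diff (the function's parameter is query), so calling the function would raise a NameError; the chat_history parameter is also no longer forwarded to the model. Below is a minimal sketch of what the committed code appears to intend, assuming User_querry was meant to be the query parameter, and reading the API key from an assumed MISTRAL_API_KEY environment variable rather than the hardcoded key in the commit. The Mistral client calls themselves match the ones used in the diff.

import os
from mistralai import Mistral

model = "mistral-large-latest"
# Assumed env var; the commit hardcodes the key instead.
client = Mistral(api_key=os.environ.get("MISTRAL_API_KEY"))

def generate_response(query, chat_history):
    # chat_history is kept for signature compatibility but, as in the commit,
    # it is not sent to the model.
    chat_response = client.chat.complete(
        model=model,
        messages=[
            {
                "role": "user",
                "content": f"{query}? provide response within 2 sentence",
            },
        ],
    )
    return chat_response.choices[0].message.content

# Example call:
# print(generate_response("How often should tomato seedlings be watered", chat_history=[]))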