import os
import base64
from io import BytesIO, StringIO
from urllib.parse import urlparse

import requests
from requests.auth import HTTPBasicAuth
from bs4 import BeautifulSoup
from PIL import Image
import pandas as pd
from pypdf import PdfReader
from ai71 import AI71
from inference_sdk import InferenceHTTPClient

UPLOAD_FOLDER = '/code/uploads'
os.makedirs(UPLOAD_FOLDER, exist_ok=True)

AI71_API_KEY = os.environ.get('AI71_API_KEY')

def generate_response(query, chat_history):
    """Stream a reply from Falcon-180B-chat, answering against the chat history."""
    response = ''
    for chunk in AI71(AI71_API_KEY).chat.completions.create(
        model="tiiuae/falcon-180b-chat",
        messages=[
            {"role": "system",
             "content": "You are a helpful agricultural assistant. "
                        "Keep each response to no more than two sentences. "
                        "Greet the user if the user greets you."},
            {"role": "user",
             "content": f"Answer the query based on this history {chat_history}: {query}"},
        ],
        stream=True,
    ):
        if chunk.choices[0].delta.content:
            response += chunk.choices[0].delta.content
    # Strip Markdown heading markers and any leaked "User:" turn from the reply.
    return response.replace("###", '').replace('\nUser:', '')


class ConversationBufferMemory:
    """Keep a sliding window of the most recent chat interactions."""

    def __init__(self, max_size=6):
        self.memory = []
        self.max_size = max_size

    def add_to_memory(self, interaction):
        self.memory.append(interaction)
        if len(self.memory) > self.max_size:
            self.memory.pop(0)  # evict the oldest interaction

    def get_memory(self):
        return self.memory
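
# A minimal usage sketch (hypothetical query; assumes AI71_API_KEY is set):
#
#     memory = ConversationBufferMemory()
#     reply = generate_response("When should I sow paddy?", memory.get_memory())
#     memory.add_to_memory(f"User: When should I sow paddy? Bot: {reply}")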

def predict_pest(filepath):
    """Detect pests in an image via a hosted Roboflow detection model."""
    client = InferenceHTTPClient(
        api_url="https://detect.roboflow.com",
        api_key="oF1aC4b1FBCDtK8CoKx7",  # better kept in an environment variable
    )
    result = client.infer(filepath, model_id="pest-detection-ueoco/1")
    # Return the top detection, or None when nothing was detected.
    return result['predictions'][0] if result['predictions'] else None


def predict_disease(filepath):
    """Classify a plant disease via a hosted Roboflow classification model."""
    client = InferenceHTTPClient(
        api_url="https://classify.roboflow.com",
        api_key="oF1aC4b1FBCDtK8CoKx7",  # better kept in an environment variable
    )
    result = client.infer(filepath, model_id="plant-disease-detection-iefbi/1")
    # Return the top predicted class, or None when the classifier returns none.
    return result['predicted_classes'][0] if result['predicted_classes'] else None
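
# Usage sketch (hypothetical image path; both calls hit the Roboflow API):
#
#     pest = predict_pest('/code/uploads/image.jpg')
#     disease = predict_disease('/code/uploads/image.jpg')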

def convert_img(url, account_sid, auth_token):
    """Download a Twilio-authenticated media URL and convert it to an RGB JPEG."""
    try:
        response = requests.get(url, auth=HTTPBasicAuth(account_sid, auth_token))
        response.raise_for_status()

        # Name the download after the media ID at the end of the URL path.
        parsed_url = urlparse(url)
        media_id = parsed_url.path.split('/')[-1]
        filename = f"downloaded_media_{media_id}"

        media_filepath = os.path.join(UPLOAD_FOLDER, filename)
        with open(media_filepath, 'wb') as file:
            file.write(response.content)
        print(f"Media downloaded successfully and saved as {media_filepath}")

        # Load the pixel data while the file handle is still open; PIL reads
        # lazily, so converting after the handle closes can raise an error.
        with open(media_filepath, 'rb') as img_file:
            image = Image.open(img_file)
            image.load()

        converted_filepath = os.path.join(UPLOAD_FOLDER, "image.jpg")
        image.convert('RGB').save(converted_filepath, 'JPEG')
        return converted_filepath

    except requests.exceptions.HTTPError as err:
        print(f"HTTP error occurred: {err}")
    except Exception as err:
        print(f"An error occurred: {err}")
    return None

def get_weather(city):
    """Scrape Google's weather card and return the temperature in Celsius."""
    city = city.strip().replace(' ', '+')
    r = requests.get(f'https://www.google.com/search?q=weather+in+{city}')
    soup = BeautifulSoup(r.text, 'html.parser')
    # Google renders the temperature in this div, e.g. "77°F" (the class name
    # is fragile and may change without notice).
    temperature = soup.find('div', attrs={'class': 'BNeawe iBp4i AP7Wnd'}).text
    degrees = temperature[:-2]   # numeric part, e.g. "77"
    sign = temperature[-2]       # the degree symbol
    # Convert from Fahrenheit (what this query returns) to Celsius.
    celsius = str(round((int(degrees) - 32) * 5 / 9, 1)) + sign + 'C'
    return celsius

def get_rates():
    """Scrape Tamil Nadu mandi prices and return a commodity-to-price map."""
    r = requests.get('https://www.kisandeals.com/mandiprices/ALL/TAMIL-NADU/ALL')
    soup = BeautifulSoup(r.text, 'html.parser')

    table = soup.find('table')
    if table:
        # Wrap the HTML in StringIO: newer pandas deprecates passing a literal
        # HTML string straight to read_html.
        df = pd.read_html(StringIO(str(table)))[0]

        # Drop the per-quintal column; only the per-kg price is reported.
        if 'Quintal Price' in df.columns:
            df.drop(columns=['Quintal Price'], inplace=True)

        # Map each commodity (first column) to its per-kg price (second column).
        d = {df.iloc[i, 0]: df.iloc[i, 1] for i in range(len(df))}
        return str(d) + ' These prices are for 1 kg'
    else:
        return "No table found on the page"