|
import os
|
|
import requests
|
|
from requests.auth import HTTPBasicAuth
|
|
from PIL import Image
|
|
from io import BytesIO
|
|
from urllib.parse import urlparse
|
|
import os
|
|
|
|
|
|
from inference_sdk import InferenceHTTPClient
|
|
import base64
|
|
# Directory where downloaded and converted media files are stored.
UPLOAD_FOLDER = '/code/uploads'

# exist_ok=True avoids the check-then-create race between an exists() test
# and makedirs() when multiple workers start at once, and is a no-op if the
# directory is already present.
os.makedirs(UPLOAD_FOLDER, exist_ok=True)
|
|
|
|
def predict_pest(filepath, model_id="pest-detection-ueoco/1"):
    """Run pest detection on a local image and return the top class.

    Parameters
    ----------
    filepath : str
        Path to a local image file to send for inference.
    model_id : str, optional
        Roboflow model identifier. Defaults to the pest-detection model,
        so existing callers are unaffected.

    Returns
    -------
    str
        The first entry of the response's ``predicted_classes`` list.

    Raises
    ------
    KeyError, IndexError
        If the inference response has no ``predicted_classes`` entries.
    """
    # NOTE(security): the API key is hard-coded in source. Move it to an
    # environment variable (e.g. os.environ["ROBOFLOW_API_KEY"]) and
    # rotate this key before shipping.
    client = InferenceHTTPClient(
        api_url="https://detect.roboflow.com",
        api_key="oF1aC4b1FBCDtK8CoKx7",
    )
    result = client.infer(filepath, model_id=model_id)
    return result['predicted_classes'][0]
|
|
|
|
|
|
def predict_disease(filepath, model_id="plant-disease-detection-iefbi/1"):
    """Run plant-disease classification on a local image and return the top class.

    Parameters
    ----------
    filepath : str
        Path to a local image file to send for inference.
    model_id : str, optional
        Roboflow model identifier. Defaults to the plant-disease model,
        so existing callers are unaffected.

    Returns
    -------
    str
        The first entry of the response's ``predicted_classes`` list.

    Raises
    ------
    KeyError, IndexError
        If the inference response has no ``predicted_classes`` entries.
    """
    # NOTE(security): the API key is hard-coded in source. Move it to an
    # environment variable (e.g. os.environ["ROBOFLOW_API_KEY"]) and
    # rotate this key before shipping.
    client = InferenceHTTPClient(
        api_url="https://classify.roboflow.com",
        api_key="oF1aC4b1FBCDtK8CoKx7",
    )
    result = client.infer(filepath, model_id=model_id)
    return result['predicted_classes'][0]
|
|
|
|
def convert_img(url, account_sid, auth_token):
    """Download a Twilio media resource and convert it to a JPEG.

    Parameters
    ----------
    url : str
        Twilio media URL to fetch.
    account_sid : str
        Twilio account SID, used for HTTP basic auth.
    auth_token : str
        Twilio auth token, used for HTTP basic auth.

    Returns
    -------
    str or None
        Path of the converted JPEG inside ``UPLOAD_FOLDER``, or ``None``
        when the download or conversion failed (errors are printed rather
        than raised, preserving the original best-effort behavior).
    """
    try:
        # A timeout prevents the request from hanging forever; a timeout
        # error is routed through the generic handler below like any other
        # failure.
        response = requests.get(
            url,
            auth=HTTPBasicAuth(account_sid, auth_token),
            timeout=30,
        )
        response.raise_for_status()

        # Name the raw download after the media id (last URL path segment).
        media_id = urlparse(url).path.split('/')[-1]
        media_filepath = os.path.join(UPLOAD_FOLDER, f"downloaded_media_{media_id}")
        with open(media_filepath, 'wb') as file:
            file.write(response.content)

        print(f"Media downloaded successfully and saved as {media_filepath}")

        # Open by path (not an already-open handle) so PIL can lazily
        # re-read the file during save; the context manager closes the
        # image afterwards. Convert to RGB because JPEG has no alpha.
        # NOTE: the fixed name means successive calls overwrite each other.
        converted_filepath = os.path.join(UPLOAD_FOLDER, "image.jpg")
        with Image.open(media_filepath) as image:
            image.convert('RGB').save(converted_filepath, 'JPEG')
        return converted_filepath

    except requests.exceptions.HTTPError as err:
        print(f"HTTP error occurred: {err}")
    except Exception as err:
        print(f"An error occurred: {err}")