import threading
import time
from flask import Flask, render_template, request, jsonify, Response
import requests
from bs4 import BeautifulSoup
from flask_caching import Cache
app = Flask(__name__)
# Configure caching: here we use a simple in-memory cache.
# In production you might use Redis or another robust backend.
app.config['CACHE_TYPE'] = 'SimpleCache'
app.config['CACHE_DEFAULT_TIMEOUT'] = 300 # Cache responses for 5 minutes
cache = Cache(app)
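
# A minimal sketch of what a Redis-backed configuration might look like
# (commented out; assumes a local Redis server and the `redis` package installed):
# app.config['CACHE_TYPE'] = 'RedisCache'
# app.config['CACHE_REDIS_HOST'] = 'localhost'
# app.config['CACHE_REDIS_PORT'] = 6379
# cache = Cache(app)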
# Global variables for pre-fetched image data
cached_image = None
cached_mimetype = None
# Internal mapping of crops to pests (for the form)
CROP_TO_PESTS = {
    "Sorgum": ["FallArmyWorm"],
    "Maize": ["FallArmyWorm"],
    "Rice": ["Blast", "GallMidge", "YSB", "PlantHopper", "BlueBeetle", "BacterialLeafBlight"],
    "Cotton": ["Thrips", "Whitefly", "PinkBollworm", "Jassid", "BollRot", "AmericanBollworm"],
    "Soybean": ["Girdlebeetle", "H.armigera", "Semilooper", "Spodoptera", "StemFLy"],
    "Tur": ["Wilt", "Webbed_Leaves", "Pod_damage"],
    "Sugarcane": ["FallArmyGrub", "WhiteGrub"],
    "Gram": ["H.armigera", "Wilt"]
}
# Fixed year options for the form
YEARS = ["2024-25", "2023-24", "2022-23", "2021-22"]
# Map our internal crop names to the external page's crop values.
CROP_MAPPING = {
    "Cotton": "1",
    "Gram": "4",
    "Maize": "7",
    "Rice": "3",
    "Sorghum": "6",
    "Soybean": "2",
    "Sugarcane": "8",
    "Tur": "5",
    "Sorgum": "6"  # duplicate of "Sorghum"; kept so the misspelled CROP_TO_PESTS key resolves
}
# Map our internal pest names to external page values per crop.
PEST_MAPPING = {
    "Cotton": {
        "FallArmyWorm": "71"
    },
    "Gram": {
        "H.armigera": "72",
        "Wilt": "73"
    },
    "Maize": {
        "FallArmyWorm": "74"
    },
    "Rice": {
        "Blast": "75",
        "GallMidge": "76",
        "YSB": "77",
        "PlantHopper": "78",
        "BlueBeetle": "79",
        "BacterialLeafBlight": "80"
    },
    "Soybean": {
        "Girdlebeetle": "81",
        "H.armigera": "82",
        "Semilooper": "83",
        "Spodoptera": "84",
        "StemFLy": "85"
    },
    "Tur": {
        "Wilt": "86",
        "Webbed_Leaves": "87",
        "Pod_damage": "88"
    },
    "Sugarcane": {
        "FallArmyGrub": "89",
        "WhiteGrub": "90"
    },
    "Sorgum": {
        "FallArmyWorm": "91"
    }
}
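
# For illustration, the two mappings above combine like this
# (values taken from the tables; used by /fetch_weeks below):
#   CROP_MAPPING["Maize"]                 -> "7"
#   PEST_MAPPING["Maize"]["FallArmyWorm"] -> "74"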
# Parameter codes and labels for the final image URL
PARAMS = {
    "Mint": "Min Temperature",
    "Maxt": "Max Temperature",
    "RH": "Relative Humidity",
    "RF": "Rainfall",
    "PR": "Pest Report"
}

@app.route('/')
def index():
    # Read query parameters (if provided)
    crop = request.args.get('crop', '')
    pest = request.args.get('pest', '')
    year = request.args.get('year', '')
    week = request.args.get('week', '')
    param = request.args.get('param', '')
    image_url = ""
    if crop and pest and year and week and param:
        # Build the external image URL (using HTTP)
        base_url = f"http://www.icar-crida.res.in:8080/naip/gisimages/{crop}/{year}/{pest}_"
        external_image_url = f"{base_url}{param}{week}.jpg"
        # Build our proxy URL so that the image is served via our app
        image_url = f"/fast-proxy?url={external_image_url}"
    return render_template('index.html',
                           crops=list(CROP_TO_PESTS.keys()),
                           crop_to_pests=CROP_TO_PESTS,
                           years=YEARS,
                           params=PARAMS,
                           selected_crop=crop,
                           selected_pest=pest,
                           selected_year=year,
                           selected_week=week,
                           selected_param=param,
                           image_url=image_url)

@app.route('/fetch_weeks')
@cache.cached(timeout=300, query_string=True)
def fetch_weeks():
    crop = request.args.get('crop', '')
    pest = request.args.get('pest', '')
    year = request.args.get('year', '')
    ext_crop = CROP_MAPPING.get(crop, '')
    ext_pest = ""
    if crop in PEST_MAPPING and pest in PEST_MAPPING[crop]:
        ext_pest = PEST_MAPPING[crop][pest]
    # The external page expects these (oddly named) parameters.
    payload = {
        "country": ext_crop,
        "city": ext_pest,
        "sowing": year
    }
    weeks = []
    try:
        response = requests.get("http://www.icar-crida.res.in:8080/naip/gismaps.jsp",
                                params=payload, timeout=10)
        soup = BeautifulSoup(response.text, 'html.parser')
        week_options = soup.select('select[name="week"] option')
        weeks = [opt.get('value') for opt in week_options
                 if opt.get('value') and "Select" not in opt.get('value')]
        if not weeks:
            weeks = [str(i) for i in range(1, 53)]
    except Exception:
        # Fall back to a generic list of weeks if the external page is unreachable.
        weeks = [str(i) for i in range(1, 53)]
    return jsonify({"weeks": weeks})
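
# Example request (values must exist in the mappings above):
#   GET /fetch_weeks?crop=Maize&pest=FallArmyWorm&year=2024-25
# returns JSON shaped like {"weeks": ["1", "2", ...]}.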
# This endpoint now uses a background-prefetched image.
@app.route('/fast-proxy')
def fast_proxy():
    global cached_image, cached_mimetype
    # If the "url" parameter doesn't match our cached URL, you might consider
    # triggering a prefetch update.
    # Note: proxying arbitrary URLs is an SSRF risk; in production you would
    # want to restrict this to the expected external host.
    external_url = request.args.get('url')
    if cached_image and external_url == prefetch_image_task.cached_url:
        return Response(cached_image, mimetype=cached_mimetype)
    else:
        # If no pre-fetched image is available, fall back to a direct fetch.
        try:
            resp = requests.get(external_url, timeout=10)
            return Response(resp.content, mimetype=resp.headers.get('Content-Type', 'image/jpeg'))
        except Exception as e:
            return str(e), 500

# Background prefetching: we also store the URL that was prefetched.
def prefetch_image_task():
    global cached_image, cached_mimetype
    # For demonstration, we prefetch one specific image.
    # In a more dynamic scenario, you could maintain a dict of URL -> image data
    # (see the sketch after the thread is started below).
    while True:
        # Here, update the URL as needed. For example, if you have default parameters:
        url = "http://www.icar-crida.res.in:8080/naip/gisimages/7/2024-25/74_Maxt1.jpg"
        try:
            response = requests.get(url, timeout=10)
            if response.status_code == 200:
                cached_image = response.content
                cached_mimetype = response.headers.get('Content-Type', 'image/jpeg')
                prefetch_image_task.cached_url = url
                print("Prefetched image from", url)
            else:
                print("Failed to prefetch, status code:", response.status_code)
        except Exception as ex:
            print("Prefetch error:", ex)
        time.sleep(300)  # Update every 5 minutes

# Initialize cached_url attribute for our task function.
prefetch_image_task.cached_url = ""
# Start the background prefetch thread.
# Note: with the Flask reloader active (debug=True), the module may be imported
# twice, which would start two prefetch threads.
threading.Thread(target=prefetch_image_task, daemon=True).start()
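
# A minimal sketch of the multi-URL variant mentioned above, keeping the same
# 5-minute refresh cadence (commented out; `PREFETCH_URLS` is a hypothetical
# list of image URLs you would define):
# prefetched = {}  # URL -> (bytes, mimetype)
# def prefetch_many_task():
#     while True:
#         for url in PREFETCH_URLS:
#             try:
#                 r = requests.get(url, timeout=10)
#                 if r.status_code == 200:
#                     prefetched[url] = (r.content, r.headers.get('Content-Type', 'image/jpeg'))
#             except Exception as ex:
#                 print("Prefetch error:", ex)
#         time.sleep(300)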

if __name__ == '__main__':
    app.run(debug=True)