rajkhanke committed on
Commit
12d2411
·
verified ·
1 Parent(s): c028e0d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -4
app.py CHANGED
@@ -1,9 +1,16 @@
1
  from flask import Flask, render_template, request, jsonify, Response
2
  import requests
3
  from bs4 import BeautifulSoup
 
4
 
5
  app = Flask(__name__)
6
 
 
 
 
 
 
 
7
  # Internal mapping of crops to pests (for the form)
8
  CROP_TO_PESTS = {
9
  "Sorgum": ["FallArmyWorm"],
@@ -29,7 +36,7 @@ CROP_MAPPING = {
29
  "Soybean": "2",
30
  "Sugarcane": "8",
31
  "Tur": "5",
32
- "Sorgum": "6" # Adjust if needed
33
  }
34
 
35
  # Map our internal pest names to external page values per crop.
@@ -96,7 +103,7 @@ def index():
96
  # Build the external image URL (using HTTP)
97
  base_url = f"http://www.icar-crida.res.in:8080/naip/gisimages/{crop}/{year}/{pest}_"
98
  external_image_url = f"{base_url}{param}{week}.jpg"
99
- # Instead of using the external HTTP URL directly, we build our proxy URL
100
  image_url = f"/proxy-image?url={external_image_url}"
101
 
102
  return render_template('index.html',
@@ -111,7 +118,9 @@ def index():
111
  selected_param=param,
112
  image_url=image_url)
113
 
 
114
  @app.route('/fetch_weeks')
 
115
  def fetch_weeks():
116
  crop = request.args.get('crop', '')
117
  pest = request.args.get('pest', '')
@@ -140,15 +149,15 @@ def fetch_weeks():
140
  weeks = [str(i) for i in range(1, 53)]
141
  return jsonify({"weeks": weeks})
142
 
 
143
  @app.route('/proxy-image')
 
144
  def proxy_image():
145
- # Get the external URL from the query parameter
146
  external_url = request.args.get('url')
147
  if not external_url:
148
  return "Missing URL", 400
149
 
150
  try:
151
- # Fetch the image from the external server
152
  resp = requests.get(external_url, timeout=10)
153
  return Response(resp.content, mimetype=resp.headers.get('Content-Type', 'image/jpeg'))
154
  except Exception as e:
 
1
  from flask import Flask, render_template, request, jsonify, Response
2
  import requests
3
  from bs4 import BeautifulSoup
4
+ from flask_caching import Cache
5
 
6
  app = Flask(__name__)
7
 
8
+ # Configure caching – here we use a simple in-memory cache.
9
+ # In production you might use Redis or another robust backend.
10
+ app.config['CACHE_TYPE'] = 'SimpleCache'
11
+ app.config['CACHE_DEFAULT_TIMEOUT'] = 300 # Cache responses for 5 minutes
12
+ cache = Cache(app)
13
+
14
  # Internal mapping of crops to pests (for the form)
15
  CROP_TO_PESTS = {
16
  "Sorgum": ["FallArmyWorm"],
 
36
  "Soybean": "2",
37
  "Sugarcane": "8",
38
  "Tur": "5",
39
+ "Sorgum": "6"
40
  }
41
 
42
  # Map our internal pest names to external page values per crop.
 
103
  # Build the external image URL (using HTTP)
104
  base_url = f"http://www.icar-crida.res.in:8080/naip/gisimages/{crop}/{year}/{pest}_"
105
  external_image_url = f"{base_url}{param}{week}.jpg"
106
+ # Build our proxy URL to avoid direct linking
107
  image_url = f"/proxy-image?url={external_image_url}"
108
 
109
  return render_template('index.html',
 
118
  selected_param=param,
119
  image_url=image_url)
120
 
121
+ # Cache this route based on its query string.
122
  @app.route('/fetch_weeks')
123
+ @cache.cached(timeout=300, query_string=True)
124
  def fetch_weeks():
125
  crop = request.args.get('crop', '')
126
  pest = request.args.get('pest', '')
 
149
  weeks = [str(i) for i in range(1, 53)]
150
  return jsonify({"weeks": weeks})
151
 
152
+ # Cache the fetched image for a short duration to avoid repeated external calls.
153
  @app.route('/proxy-image')
154
+ @cache.cached(timeout=300, query_string=True)
155
  def proxy_image():
 
156
  external_url = request.args.get('url')
157
  if not external_url:
158
  return "Missing URL", 400
159
 
160
  try:
 
161
  resp = requests.get(external_url, timeout=10)
162
  return Response(resp.content, mimetype=resp.headers.get('Content-Type', 'image/jpeg'))
163
  except Exception as e: