rajkhanke committed · Commit d38f151 · verified · 1 Parent(s): c2cbf85

Update app.py

Files changed (1)
  1. app.py +21 -17
app.py CHANGED
@@ -1,4 +1,4 @@
-from flask import Flask, render_template, request, jsonify
+from flask import Flask, render_template, request, jsonify, Response
 import requests
 from bs4 import BeautifulSoup
 
@@ -82,7 +82,6 @@ PARAMS = {
     "PR": "Pest Report"
 }
 
-
 @app.route('/')
 def index():
     # Read query parameters (if provided)
@@ -94,10 +93,11 @@ def index():
 
     image_url = ""
     if crop and pest and year and week and param:
-        # Build image URL using the pattern:
-        # http://www.icar-crida.res.in:8080/naip/gisimages/{CROP}/{YEAR}/{PEST}_{PARAM}{WEEK}.jpg
+        # Build the external image URL (using HTTP)
         base_url = f"http://www.icar-crida.res.in:8080/naip/gisimages/{crop}/{year}/{pest}_"
-        image_url = f"{base_url}{param}{week}.jpg"
+        external_image_url = f"{base_url}{param}{week}.jpg"
+        # Instead of using the external HTTP URL directly, we build our proxy URL
+        image_url = f"/proxy-image?url={external_image_url}"
 
     return render_template('index.html',
                            crops=list(CROP_TO_PESTS.keys()),
@@ -111,37 +111,28 @@ def index():
                            selected_param=param,
                            image_url=image_url)
 
-
 @app.route('/fetch_weeks')
 def fetch_weeks():
-    """
-    Dynamically fetch available week options by sending a GET request
-    with the selected crop, pest, and year to the external JSP page.
-    """
     crop = request.args.get('crop', '')
     pest = request.args.get('pest', '')
     year = request.args.get('year', '')
 
-    # Use our mappings to get external values
     ext_crop = CROP_MAPPING.get(crop, '')
     ext_pest = ""
    if crop in PEST_MAPPING and pest in PEST_MAPPING[crop]:
         ext_pest = PEST_MAPPING[crop][pest]
 
-    # Build query parameters as expected by the external page
     payload = {
-        "country": ext_crop,   # external crop value
-        "city": ext_pest,      # external pest value
-        "sowing": year         # year remains the same
+        "country": ext_crop,
+        "city": ext_pest,
+        "sowing": year
     }
 
     weeks = []
     try:
-        # Use GET request with parameters
         response = requests.get("http://www.icar-crida.res.in:8080/naip/gismaps.jsp", params=payload, timeout=10)
         soup = BeautifulSoup(response.text, 'html.parser')
         week_options = soup.select('select[name="week"] option')
-        # Filter out default options (e.g., those containing "Select")
         weeks = [opt.get('value') for opt in week_options if opt.get('value') and "Select" not in opt.get('value')]
         if not weeks:
             weeks = [str(i) for i in range(1, 53)]
@@ -149,6 +140,19 @@ def fetch_weeks():
         weeks = [str(i) for i in range(1, 53)]
     return jsonify({"weeks": weeks})
 
+@app.route('/proxy-image')
+def proxy_image():
+    # Get the external URL from the query parameter
+    external_url = request.args.get('url')
+    if not external_url:
+        return "Missing URL", 400
+
+    try:
+        # Fetch the image from the external server
+        resp = requests.get(external_url, timeout=10)
+        return Response(resp.content, mimetype=resp.headers.get('Content-Type', 'image/jpeg'))
+    except Exception as e:
+        return str(e), 500
 
 if __name__ == '__main__':
     app.run(debug=True)
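Note on the proxy URL built in index(): the external URL is interpolated straight into the query string. Flask's request.args.get('url') will generally still recover it, since ':' and '/' are legal in a query value, but percent-encoding the value makes that explicit. A minimal sketch using only the standard library; the RICE/2024/BPH_PR23.jpg combination is made up for illustration and is not taken from the app's mappings:

from urllib.parse import quote

# Illustrative external URL following the pattern used in index();
# the crop/year/pest/param/week values here are hypothetical.
external_image_url = (
    "http://www.icar-crida.res.in:8080/naip/gisimages/RICE/2024/BPH_PR23.jpg"
)

# Percent-encode the value before embedding it in the /proxy-image query string;
# Flask decodes it back automatically in request.args.get('url').
image_url = f"/proxy-image?url={quote(external_image_url, safe='')}"
print(image_url)
# /proxy-image?url=http%3A%2F%2Fwww.icar-crida.res.in%3A8080%2Fnaip%2Fgisimages%2FRICE%2F2024%2FBPH_PR23.jpg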
 
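For local verification, a minimal sketch that drives the new /proxy-image route through Flask's test client. It assumes app.py exposes the Flask instance as app and that the ICAR-CRIDA host is reachable for the happy path; the image URL is the same hypothetical example as above:

from app import app  # assumes app.py defines `app = Flask(__name__)`

with app.test_client() as client:
    # Happy path: proxy a hypothetical image URL built the same way index() does.
    ok = client.get(
        "/proxy-image",
        query_string={
            "url": "http://www.icar-crida.res.in:8080/naip/gisimages/RICE/2024/BPH_PR23.jpg"
        },
    )
    print(ok.status_code, ok.headers.get("Content-Type"))

    # Missing ?url= hits the 400 branch introduced in this commit.
    missing = client.get("/proxy-image")
    print(missing.status_code)  # 400

Since proxy_image fetches whatever URL it receives, this pattern is commonly paired with a check that the target host is the expected www.icar-crida.res.in server.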