Sahand
committed on
Commit
·
6b83428
1
Parent(s):
d2d5d81
initial commit
Browse files- CHANGELON.md +12 -0
- Dockerfile.dash +6 -0
- __init__.py +0 -0
- admin.py +31 -0
- app-v2.py +91 -0
- app.py +359 -0
- app.yaml +15 -0
- assets/icons/fail.png +0 -0
- assets/icons/success.png +0 -0
- assets/icons/wait.png +0 -0
- dashboard/__init__.py +0 -0
- jobadd/__init__.py +0 -0
- libs/dashapp.py +947 -0
- libs/utils.py +31 -0
- model/__init__.py +0 -0
- model/client.py +0 -0
- model/farm.py +0 -0
- model/job.py +22 -0
- model/project.py +0 -0
- register/__init__.py +0 -0
- requirements/requirements-dev-dash.txt +10 -0
- requirements/requirements-dev-vis.txt +26 -0
- requirements/requirements-vis-system.txt +1 -0
- requirements/requuirements-gcp.txt +5 -0
- scripts/__init__.py +0 -0
- scripts/analyse.py +468 -0
- scripts/animate.py +214 -0
- scripts/assets/icons/fail.png +0 -0
- scripts/assets/icons/success.png +0 -0
- scripts/assets/icons/wait.png +0 -0
- scripts/display.py +651 -0
- scripts/sankey.py +185 -0
- scripts/simple_html.py +391 -0
- secrets/README.md +1 -0
- services/__init__.py +0 -0
- services/job_management.py +66 -0
- setup.cfg +20 -0
- templates/admin.html +28 -0
- templates/base.html +31 -0
- templates/dash_app.html +29 -0
- templates/index.html +7 -0
- templates/job_add.html +34 -0
- webservice.py +0 -0
CHANGELON.md
ADDED
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
## 09/08/2023
|
2 |
+
WIP - services are being added. Added a draft of job management service.
|
3 |
+
|
4 |
+
|
5 |
+
## 08/08/2023
|
6 |
+
|
7 |
+
* Added diagrams
|
8 |
+
* Added db model
|
9 |
+
* Dash App is a working prototype. To be added is the quantiles.
|
10 |
+
* Added a new file to the repo: `requirements.txt` to be used for deployment.
|
11 |
+
* Initial app structure has gone through design and a draft is ready.
|
12 |
+
* There are currently 2 versions of the app. One is messy and works. The app-v2 is a cleaner version but is not working yet.
|
Dockerfile.dash
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
FROM python:3.9
|
2 |
+
WORKDIR /
|
3 |
+
COPY ./requirements/requirements-dev-vis.txt /requirements-dev-vis.txt
|
4 |
+
RUN pip install --no-cache-dir --upgrade -r /requirements-dev-vis.txt
|
5 |
+
COPY . .
|
6 |
+
CMD ["python", "app.py","--input","Data/{}"]
|
__init__.py
ADDED
File without changes
|
admin.py
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
"""Minimal Flask admin panel for the nowcasting database.

Builds a Flask app backed by SQLAlchemy, attaches a Flask-Admin panel,
and serves a single home route that renders ``admin.html``.
"""
import os

from flask import Flask, render_template
from flask_admin import Admin
from flask_sqlalchemy import SQLAlchemy

# Consts
# SECURITY FIX: the database password and secret key were hard-coded in
# source (and committed). They are now read from the environment; the old
# literal values remain only as defaults so existing local setups keep
# working. Rotate the leaked password and set real values in production.
SQLPROTOCOL = os.environ.get("NOWCAST_SQL_PROTOCOL", "mysql://")  # "sqlite:////"
SQLUNAME = os.environ.get("NOWCAST_SQL_USER", "root")
SQLPASS = os.environ.get("NOWCAST_SQL_PASS", "Sahy1990!")  # FIXME: rotate this leaked credential
SQLHOST = os.environ.get("NOWCAST_SQL_HOST", "localhost")
SQLDB = os.environ.get("NOWCAST_SQL_DB", "nowcasting")

# App
app = Flask(__name__)
app.config["SECRET_KEY"] = os.environ.get("NOWCAST_SECRET_KEY", "your-secret-key")
app.config[
    "SQLALCHEMY_DATABASE_URI"
] = f"{SQLPROTOCOL}{SQLUNAME}:{SQLPASS}@{SQLHOST}/{SQLDB}"
db = SQLAlchemy(app)
admin = Admin(app, name="My Admin Panel", template_mode="bootstrap4")


@app.route("/")
def home():
    """Render the admin landing page."""
    return render_template(
        "admin.html"
    )  # "Welcome to Nowcasting App.<br><br>The dashapp is still under development..."


if __name__ == "__main__":
    app.run(debug=True)
|
app-v2.py
ADDED
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
"""Flask entry point (v2) wiring routes and the database.

Cleaner rewrite of ``app.py``: view functions live in the ``routes``
module and are attached via ``add_url_rule`` instead of decorators.
"""
import argparse
import logging
import os
import traceback

import routes
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

from libs.utils import setup_logging

# from marshmallow import Schema, fields


setup_logging()
log = logging.getLogger(__name__)


def create_the_database(db):
    """Create all tables for the metadata bound to *db*."""
    db.create_all()


# ===============================================================================

SQLPROTOCOL = "mysql://"  # "sqlite:////"
SQLUNAME = "root"
SQLPASS = ""
SQLHOST = "localhost"
SQLDB = "nowcasting"

app = Flask(__name__)
app.config["SECRET_KEY"] = "A secret for nowcasting in SIH"

all_methods = ["GET", "POST"]

# Home page (where you will add a new user)
app.add_url_rule("/", view_func=routes.index)
# "Thank you for submitting your form" page
app.add_url_rule("/submitted", methods=all_methods, view_func=routes.submitted)
# Viewing all the content in the database page
app.add_url_rule("/database", view_func=routes.view_database)
app.add_url_rule(
    "/modify<the_id>/<modified_category>",
    methods=all_methods,
    view_func=routes.modify_database,
)
app.add_url_rule("/delete<the_id>", methods=all_methods, view_func=routes.delete)

app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False  # no warning messages
# BUG FIX: this was a plain string containing shell-style "${...}" placeholders,
# so SQLAlchemy received the literal text as a (broken) URI. Use an f-string so
# the connection constants above are actually interpolated.
app.config[
    "SQLALCHEMY_DATABASE_URI"
] = f"{SQLPROTOCOL}{SQLUNAME}:{SQLPASS}@{SQLHOST}/{SQLDB}"
db = SQLAlchemy(app)

# ===============================================================================


# ===============================================================================

if __name__ == "__main__":
    # Load Configs
    parser = argparse.ArgumentParser(
        description="Download rainfall data from Google Earth Engine for a range of dates.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "-i",
        "--input",
        help="Absolute or relative path to the netcdf data directory for each farm. Should be in this format: '/path/to/farm/{}/soilwatermodel'",
        default=os.path.join(
            os.path.expanduser("~"), "Data/results_default/{}/soilwatermodel"
        ),
    )
    parser.add_argument(
        "-d",
        "--debug",
        help="Debug mode as True or False. Default is True.",
        default=True,
    )

    args = parser.parse_args()
    INPUT = args.input

    try:
        # dashapp.run_server(debug=args.debug)
        app.run(debug=args.debug)
    except Exception as e:
        print("Error in main.py:", e)
        traceback.print_exc()
        raise e
|
app.py
ADDED
@@ -0,0 +1,359 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/env python
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import argparse
|
4 |
+
import datetime
|
5 |
+
import logging
|
6 |
+
import os
|
7 |
+
import traceback
|
8 |
+
from typing import Dict, List
|
9 |
+
|
10 |
+
# import dash_html_components as html
|
11 |
+
# import flask
|
12 |
+
import numpy as np
|
13 |
+
import plotly.express as px
|
14 |
+
from dash import Dash, Input, Output, State # , dcc # ,html
|
15 |
+
from dash.exceptions import PreventUpdate
|
16 |
+
from flask import Flask, render_template
|
17 |
+
|
18 |
+
from libs.dashapp import layout, open_image, perform_analysis
|
19 |
+
from libs.utils import setup_logging
|
20 |
+
from libs.utils import verbose as vprint
|
21 |
+
|
22 |
+
setup_logging()
|
23 |
+
log = logging.getLogger(__name__)
|
24 |
+
CONFIG = {}
|
25 |
+
V = 1
|
26 |
+
V_IGNORE = [] # Debug, Warning, Error
|
27 |
+
MODEL_PATH = ""
|
28 |
+
|
29 |
+
# ===============================================================================
|
30 |
+
# The Main App
|
31 |
+
# ===============================================================================
|
32 |
+
app = Flask(__name__)
|
33 |
+
|
34 |
+
|
35 |
+
@app.route("/")
|
36 |
+
def home():
|
37 |
+
return render_template(
|
38 |
+
"index.html"
|
39 |
+
) # "Welcome to Nowcasting App.<br><br>The dashapp is still under development..."
|
40 |
+
|
41 |
+
|
42 |
+
@app.route("/register")
|
43 |
+
def register():
|
44 |
+
return "Welcome to Nowcasting signup page.<br><br>The dashapp is still under development..."
|
45 |
+
|
46 |
+
|
47 |
+
@app.route("/addjob")
|
48 |
+
def addjob():
|
49 |
+
return "Welcome to Nowcasting signup page.<br><br>The dashapp is still under development..."
|
50 |
+
|
51 |
+
|
52 |
+
@app.route("/dashboard")
|
53 |
+
def dashboard():
|
54 |
+
return "Welcome to Nowcasting dashboard page.<br><br>This page will contain dashboard for the prediction pipelines, and allow you to manage and add new predictions.<br>The dashapp is still under development..."
|
55 |
+
# dashapp
|
56 |
+
|
57 |
+
|
58 |
+
# ===============================================================================
|
59 |
+
# Soil Moisture Comparison Tool App Layout
|
60 |
+
# ===============================================================================
|
61 |
+
# external JavaScript files
|
62 |
+
external_scripts = [
|
63 |
+
"https://www.google-analytics.com/analytics.js",
|
64 |
+
{"src": "https://cdn.polyfill.io/v2/polyfill.min.js"},
|
65 |
+
{
|
66 |
+
"src": "https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.10/lodash.core.js",
|
67 |
+
"integrity": "sha256-Qqd/EfdABZUcAxjOkMi8eGEivtdTkh3b65xCZL4qAQA=",
|
68 |
+
"crossorigin": "anonymous",
|
69 |
+
},
|
70 |
+
]
|
71 |
+
|
72 |
+
# external CSS stylesheets
|
73 |
+
external_stylesheets = [
|
74 |
+
"https://codepen.io/chriddyp/pen/bWLwgP.css",
|
75 |
+
{
|
76 |
+
"href": "https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css",
|
77 |
+
"rel": "stylesheet",
|
78 |
+
"integrity": "sha384-MCw98/SFnGE8fJT3GXwEOngsV7Zt27NXFoaoApmYm81iuXoPkFOJwJ8ERdknLPMO",
|
79 |
+
"crossorigin": "anonymous",
|
80 |
+
},
|
81 |
+
]
|
82 |
+
|
83 |
+
|
84 |
+
dashapp = Dash(
|
85 |
+
__name__,
|
86 |
+
server=app,
|
87 |
+
routes_pathname_prefix="/dashapp/",
|
88 |
+
external_scripts=external_scripts,
|
89 |
+
external_stylesheets=external_stylesheets,
|
90 |
+
title="Soil Moisture Comparison Tool",
|
91 |
+
update_title="Loading the tool...",
|
92 |
+
)
|
93 |
+
|
94 |
+
# farm_name = "Arawa"
|
95 |
+
# layer = "SM2"
|
96 |
+
|
97 |
+
time_delta = datetime.timedelta(days=20)
|
98 |
+
FAIL_IMAGE = dashapp.get_asset_url("icons/fail.png")
|
99 |
+
SUCCESS_IMAGE = dashapp.get_asset_url("icons/success.png")
|
100 |
+
WAIT_IMAGE = dashapp.get_asset_url("icons/wait.png")
|
101 |
+
|
102 |
+
current_working_directory = os.getcwd()
|
103 |
+
dashapp.index_template = os.path.join(
|
104 |
+
current_working_directory, "templates", "dashapp.html"
|
105 |
+
)
|
106 |
+
dashapp.layout = layout(WAIT_IMAGE)
|
107 |
+
|
108 |
+
|
109 |
+
def find_model_path(real_path):
    """Locate the nearest ``soilwatermodel`` directory under *real_path*.

    Walks the tree rooted at *real_path* and returns the first directory
    named ``soilwatermodel`` (in ``os.walk`` order). If none is found,
    *real_path* is returned unchanged.
    """
    for parent, subdirs, _ in os.walk(real_path):
        if "soilwatermodel" not in subdirs:
            continue
        found = os.path.join(parent, "soilwatermodel")
        print("real_path is", found)
        return found
    # Nothing matched: hand the caller back the original path.
    return real_path
|
117 |
+
|
118 |
+
|
119 |
+
# ====================================================================================================
|
120 |
+
# Callbacks
|
121 |
+
# ====================================================================================================
|
122 |
+
@dashapp.callback(
    [
        Output("farm-name-session", "data"),
        Output("farm-image", "src"),
    ],
    [Input("farm-name", "value"), State("farm-name-session", "data")],
)
def update_session(farm_name, session):
    """Store the selected farm name in the session and pick a status icon.

    Returns
    -------
    tuple
        ``(session, image)`` — the session becomes the farm name when its
        data directory exists (success icon); otherwise "" with a wait or
        failure icon.
    """
    global MODEL_PATH
    session = farm_name
    if farm_name is None or farm_name == "":
        session = ""
        image = WAIT_IMAGE
    else:
        print(f"Getting some data about farm: {farm_name}")

        # if the path does not exist, do not update the session
        real_path = INPUT.format(farm_name)

        print(f"Checking: {real_path}")
        # BUG FIX: `path` was only assigned inside the try block, so a failure
        # in find_model_path raised UnboundLocalError at `MODEL_PATH = path`.
        # Fall back to the unmodified real_path when the search fails.
        path = real_path
        try:
            path = find_model_path(real_path)
            real_path = path
        except Exception:
            print("Exception raised while searching for the root")

        MODEL_PATH = path

        if os.path.exists(real_path):
            session = farm_name
            image = SUCCESS_IMAGE
        else:
            session = ""
            image = FAIL_IMAGE

    print(f"\n\nSession updated to {session}")
    print(f"Image updated to {image}\n\n")

    return session, image
|
161 |
+
|
162 |
+
|
163 |
+
@dashapp.callback(
    Output("farm-name", "value"),
    Input("farm-name-session", "modified_timestamp"),
    State("farm-name-session", "data"),
)
def display_name_from_session(timestamp, name):
    """Restore the farm-name input from the session store.

    Returns the stored name once the store has a modification timestamp,
    otherwise an empty string.
    """
    print(f"Updating the farm name from the session: {name}")
    return name if timestamp is not None else ""
|
174 |
+
|
175 |
+
|
176 |
+
@dashapp.callback(
    Output("visualisation-select", "options"),
    # Input("farm-name", "value"),
    Input("layer-dropdown", "value"),
    Input("window-select", "start_date"),
    Input("window-select", "end_date"),
    Input("historic-dropdown", "value"),
    Input("w-aggregation-dropdown", "value"),
    Input("h-aggregation-dropdown", "value"),
    Input("generate-button", "n_clicks"),
    State("farm-name-session", "data"),
)
def get_analysis(
    layer, window_start, window_end, historic_years, w_agg, h_agg, n_clicks, farm_name
) -> List[Dict[str, str]]:
    """Get the analysis files and return them as a list of dicts.

    Runs ``perform_analysis`` against the globally stored ``MODEL_PATH``
    and converts the resulting file paths into label/value options for
    the visualisation dropdown. No work is done until the generate button
    has been clicked at least once (``PreventUpdate`` otherwise).

    Parameters
    ----------
    layer : str
        layer to use for the analysis
    window_start : str
        start date of the window
    window_end : str
        end date of the window
    historic_years : int
        number of years to use for the historic data
    w_agg : str
        aggregation method for the window data
    h_agg : str
        aggregation method for the historic data
    n_clicks : int
        number of times the generate button has been clicked
    farm_name : str
        farm name held in the session store (currently unused in the body)

    Returns
    -------
    files : list
        list of dicts of analysis files
    """
    global MODEL_PATH
    print("\nAnalysis callback triggered")

    if n_clicks == 0 or n_clicks is None:
        raise PreventUpdate

    # window_start = datetime.datetime.strptime(window_start, '%Y-%m-%d')
    # window_end = datetime.datetime.strptime(window_end, '%Y-%m-%d')
    print(f"\nPath: {MODEL_PATH}\n")

    # perform_analysis is a project helper (libs.dashapp); it presumably
    # returns a mapping of analysis name -> output file path — confirm there.
    files = perform_analysis(
        input=MODEL_PATH,
        window_start=window_start,
        window_end=window_end,
        historic_years=historic_years,
        layer=layer,
        agg_window=w_agg,
        agg_history=h_agg,
        comparison="diff",
        output=None,
        match_raster=None,
    )

    print(MODEL_PATH)
    print(
        f"n_clicks: {n_clicks}\n"
        + f"window_start: {window_start}\n"
        + f"window_end: {window_end}\n"
        + f"historic_years: {historic_years}\n"
        + f"layer: {layer}\n"
        + f"agg_window: {w_agg}\n"
        + f"agg_history: {h_agg}\n"
        + "comparison: 'diff'\n"
        + f"output: {None}\n"
        + f"match_raster: {None}\n"
    )
    print(files)
    # Turn "some-analysis-name.ext" into a human-readable label while keeping
    # the original path as the option value.
    files = {
        i: [
            " ".join(files[i].split("/")[-1].split(".")[0].split("-")).capitalize(),
            files[i],
        ]
        for i in files
    }
    print(files)
    options = [{"label": files[i][0], "value": files[i][1]} for i in files]

    return options
|
263 |
+
|
264 |
+
|
265 |
+
@dashapp.callback(
    Output("graph", "figure"),
    Input("visualisation-select", "value"),
    Input("platter-dropdown", "value"),
    Input("generate-button", "n_clicks"),
)
def change_colorscale(file, palette, n_clicks):
    """Display the selected visualisation and change the colorscale of the
    visualisation.

    Parameters
    ----------
    file : str
        path to the visualisation file
    palette : str
        name of the colorscale to use
    n_clicks : int
        number of times the generate button has been clicked; nothing is
        rendered until it has been clicked at least once

    Returns
    -------
    fig : plotly.graph_objects.Figure
        plotly figure object

    Raises
    ------
    PreventUpdate
        when no generation has happened yet or no file is selected
    """
    if n_clicks == 0 or n_clicks is None or file is None:
        raise PreventUpdate

    # open_image (libs.dashapp) — assumed to return the raster band plus
    # 2-D lat/lon coordinate grids; confirm against its definition.
    band1, lons_a, lats_a = open_image(file)

    # Get the second dimension of the lons
    lats = lats_a[:, 0]
    lons = lons_a[0, :]
    # Quantile output files carry percentiles; everything else is labelled SM.
    if "quantile" in file:
        value_type = "Percentile"
    else:
        value_type = "SM"

    print(lons.shape, lons)
    print(lats.shape, lats)
    print(band1.shape, band1)
    print(file)
    fig = px.imshow(band1, x=lons, y=lats, color_continuous_scale=palette)
    # Attach per-pixel value/lat/lon so the hover tooltip can show all three.
    fig.update(
        data=[
            {
                "customdata": np.stack((band1, lats_a, lons_a), axis=-1),
                "hovertemplate": f"<b>{value_type}"
                + "</b>: %{customdata[0]}<br>"
                + "<b>Lat</b>: %{customdata[1]}<br>"
                + "<b>Lon</b>: %{customdata[2]}<br>"
                + "<extra></extra>",
            }
        ]
    )
    print("Render successful")
    return fig
|
319 |
+
|
320 |
+
|
321 |
+
# ==============================================================================
|
322 |
+
# Main
|
323 |
+
# ==============================================================================
|
324 |
+
|
325 |
+
if __name__ == "__main__":
|
326 |
+
# Load Configs
|
327 |
+
parser = argparse.ArgumentParser(
|
328 |
+
description="Download rainfall data from Google Earth Engine for a range of dates.",
|
329 |
+
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
|
330 |
+
)
|
331 |
+
parser.add_argument(
|
332 |
+
"-i",
|
333 |
+
"--input",
|
334 |
+
help="Absolute or relative path to the netcdf data directory for each farm. Should be in this format: '/path/to/farm/{}/soilwatermodel'",
|
335 |
+
default=os.path.join(os.path.expanduser("~"), "Data/results_default/{}"),
|
336 |
+
)
|
337 |
+
parser.add_argument(
|
338 |
+
"-d",
|
339 |
+
"--debug",
|
340 |
+
help="Debug mode as True or False. Default is True.",
|
341 |
+
default=True,
|
342 |
+
)
|
343 |
+
|
344 |
+
args = parser.parse_args()
|
345 |
+
INPUT = args.input
|
346 |
+
|
347 |
+
try:
|
348 |
+
# dashapp.run_server(debug=args.debug)
|
349 |
+
app.run(debug=args.debug)
|
350 |
+
except Exception as e:
|
351 |
+
vprint(
|
352 |
+
0,
|
353 |
+
V,
|
354 |
+
V_IGNORE,
|
355 |
+
Error="Failed to execute the main function:",
|
356 |
+
ErrorMessage=e,
|
357 |
+
)
|
358 |
+
traceback.print_exc()
|
359 |
+
raise e
|
app.yaml
ADDED
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright 2023 Sydney Informatics Hub (SIH)
|
2 |
+
|
3 |
+
runtime: python39
|
4 |
+
|
5 |
+
handlers:
|
6 |
+
# This configures Google App Engine to serve the files in the app's static
|
7 |
+
# directory.
|
8 |
+
- url: /static
|
9 |
+
static_dir: static
|
10 |
+
|
11 |
+
# This handler routes all requests not caught above to your main app. It is
|
12 |
+
# required when static routes are defined, but can be omitted (along with
|
13 |
+
# the entire handlers section) when there are no static files defined.
|
14 |
+
- url: /.*
|
15 |
+
script: auto
|
assets/icons/fail.png
ADDED
![]() |
assets/icons/success.png
ADDED
![]() |
assets/icons/wait.png
ADDED
![]() |
dashboard/__init__.py
ADDED
File without changes
|
jobadd/__init__.py
ADDED
File without changes
|
libs/dashapp.py
ADDED
@@ -0,0 +1,947 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
import gc
|
3 |
+
import glob
|
4 |
+
import os
|
5 |
+
from datetime import timedelta
|
6 |
+
from typing import Dict
|
7 |
+
|
8 |
+
import fiona
|
9 |
+
import geopandas as gpd
|
10 |
+
import netCDF4
|
11 |
+
import numpy as np
|
12 |
+
import pandas as pd
|
13 |
+
import rasterio
|
14 |
+
from shapely.validation import make_valid
|
15 |
+
|
16 |
+
from libs.utils import verbose as vprint
|
17 |
+
|
18 |
+
V = 1
|
19 |
+
V_IGNORE = [] # Debug, Warning, Error
|
20 |
+
|
21 |
+
|
22 |
+
def read_shape_file(
    file_name: str,
    epsg_num: int = 4326,
    dissolve=True,
    # buffer: float = 100,
    *args,
    **kwargs,
) -> gpd.GeoDataFrame:
    """Read a shape file and return a geo-dataframe object.

    Parameters
    ----------
    file_name : str
        path to shape file
    epsg_num : int, optional
        coordinate system number
    dissolve : bool, optional
        Dissolve to one geometry, e.g. get the most external shape.
        NOTE(review): currently unused in the body — confirm intent.

    Returns
    -------
    gpd.GeoDataFrame
        pandas geo-dataframe with the geometry data and other infos

    Raises
    ------
    FileNotFoundError
        error if file is not found
    """
    if not os.path.exists(file_name):
        # BUG FIX: include the offending path in the exception; a bare
        # `raise FileNotFoundError` gave the caller no clue which file failed.
        raise FileNotFoundError(file_name)

    geo_df = gpd.read_file(file_name)
    if epsg_num is not None:
        geo_df["geometry"] = geo_df["geometry"].to_crs(epsg=epsg_num)

    crs = geo_df.crs
    if crs is None:
        crs = 4326

    print("Initial CRS:", crs)

    # check for multi: repair any invalid geometries in place
    geo_df.geometry = geo_df.apply(
        lambda row: make_valid(row.geometry)
        if not row.geometry.is_valid
        else row.geometry,
        axis=1,
    )
    return geo_df
|
73 |
+
|
74 |
+
|
75 |
+
def find_match_raster(model_path):
    """Search for an ``et_pp`` directory starting three levels above *model_path*.

    Returns the first ``et_pp`` directory encountered while walking the
    tree, or ``None`` when there is none.
    """
    # navigate three directories up from model_path
    base = os.path.dirname(os.path.dirname(os.path.dirname(model_path)))
    print("Looking for a matching raster in:", base)
    for parent, subdirs, _ in os.walk(base):
        if "et_pp" in subdirs:
            raster_dir = os.path.join(parent, "et_pp")
            print("raster_path is", raster_dir)
            # for file in os.listdir(real_path):
            #     if file.endswith(".tif"):
            #         match_raster = os.path.join(real_path,file)
            #         print("Found match raster:",match_raster)
            return raster_dir
    return None
|
92 |
+
|
93 |
+
|
94 |
+
def find_shape_file(model_path):
    """Return the first ``.shp`` file found three levels above *model_path*.

    ``None`` is returned when no shape file exists anywhere in that tree.
    """
    # navigate three directories up from model_path
    base = os.path.dirname(os.path.dirname(os.path.dirname(model_path)))
    print("Looking for a shape file in:", base)
    for parent, _, filenames in os.walk(base):
        shapes = [name for name in filenames if name.endswith(".shp")]
        if shapes:
            return os.path.join(parent, shapes[0])
    return None
|
107 |
+
|
108 |
+
|
109 |
+
def get_range_agg(
    input_dir: str,
    window_start: str,
    window_end: str,
    layer_name: str,
    agg: str = "mean",
) -> np.ndarray:
    """Get the aggregated raster for a given window_start and window_end dates.

    Parameters
    ----------
    input_dir : str
        Path to the directory containing the netcdf files.
    window_start : str
        Start date of the window. Format: YYYY-MM-DD.
    window_end : str
        End date of the window. Format: YYYY-MM-DD. The window is
        half-open: window_end itself is excluded.
    layer_name : str
        Soil layer to consider for the aggregation.
    agg : str
        Aggregation method to use. Possible values: mean, median, max, min,
        std, var, sum, or None (None returns the raw concatenated cube).

    Returns
    -------
    np.ndarray
        Aggregated raster for the given window_start and window_end dates.

    Raises
    ------
    ValueError
        When *agg* is not one of the supported aggregation names.
    """

    # Get the list of dates between two dates if date_from and date_to
    dates = pd.DataFrame(
        pd.date_range(
            pd.to_datetime(window_start),
            pd.to_datetime(window_end) - timedelta(days=1),
            freq="d",
        ),
        columns=["date"],
    )  # .strftime('%Y-%m-%d')
    # dayofyear is shifted to 0-based so it can index the netCDF time axis
    dates["dayofyear"] = dates["date"].dt.dayofyear - 1
    dates["year"] = dates["date"].dt.year
    dates["str_dates"] = dates["date"].dt.strftime("%Y-%m-%d")

    yearly_dates = dates.groupby("year")["dayofyear"].apply(list).to_dict()

    data_l = list()
    # For each year, get the data for layer_name for the dates specified in yearly_dates
    for year in yearly_dates:
        # read the year file
        nc_y = netCDF4.Dataset(os.path.join(input_dir, f"model_{year}.nc"))

        vprint(
            1,
            V,
            V_IGNORE,
            Debug=f"getting data for year: {year} from layer: {layer_name}...",
        )
        # Get the data for the layer_name
        data = nc_y.variables[layer_name][:, :, :]

        # Get the data for the dates
        days = yearly_dates[year]

        data = data[days, :, :]

        data_l.append(data)
        nc_y.close()
        # Free the per-year slab before loading the next one to cap peak memory
        del data
        gc.collect()

    # Concat data for all years along the time axis
    data_concat = np.concatenate(data_l, axis=0)
    # IDIOM: a dispatch table replaces the previous if/elif chain; a stray
    # no-op statement (`data_concat.shape`) was also removed.
    agg_funcs = {
        "mean": np.mean,
        "median": np.median,
        "max": np.max,
        "min": np.min,
        "std": np.std,
        "var": np.var,
        "sum": np.sum,
    }
    if agg is None:
        data_agg = data_concat.copy()
    elif agg in agg_funcs:
        data_agg = agg_funcs[agg](data_concat, axis=0)
    else:
        raise ValueError(
            f"agg should be one of 'mean', 'median', 'max', 'min', 'std', 'var', 'sum', or a None value. {agg} was provided."
        )
    print("done.")
    return data_agg
|
209 |
+
|
210 |
+
|
211 |
+
def get_historic_agg(
    input_dir: str,
    historic_years: int,
    current_window_start: str,
    current_window_end: str,
    layer_name: str,
    agg_window: str = "mean",
    agg_history: str = "mean",
) -> np.ndarray:
    """Get the historic aggregate for a given window_start and window_end dates.

    Parameters
    ----------
    input_dir : str
        Path to the directory containing the netcdf files.
    historic_years : int
        Number of historic years to consider for the mean.
    current_window_start : str
        Start date of the current window. Format: YYYY-MM-DD.
    current_window_end : str
        End date of the current window. Format: YYYY-MM-DD.
    layer_name : str
        Soil layer to consider for the mean.
    agg_window : str
        Aggregation method for the window (applies to both current and historic). Default is "mean".
        Possible values: "mean", "median", "max", "min", "std", "var", "sum", None
        (None will just make a 3D matrix for each year).
    agg_history : str
        Aggregation method for the historic years. This defines the way all years are combined
        into one. Default is "mean". Possible values: "mean", "median", "max", "min", "std",
        "var", "sum", "quantile", None.
        If "quantile" (or None) is selected, the historic cube is sent back as a 3D matrix
        (non-aggregated windows concatenated on axis 0) for quantile comparison.

    Returns
    -------
    np.ndarray
        Array of the historic aggregate for the given window_start and window_end dates
        for the historic years.

    Raises
    ------
    FileNotFoundError
        If the file for the oldest requested historic year is not found. Possible solutions:
        - The historic year should be modelled before calling this function.
        - The path to the historic year should be changed.
        - Calculate for a more recent historic year by reducing historic_years value.
    ValueError
        If agg_history is not one of the supported aggregation methods.
    """

    # Extract the calendar years that bound the current window.
    window_start_year = pd.to_datetime(current_window_start).year
    window_end_year = pd.to_datetime(current_window_end).year

    # Oldest year needed for the comparison.
    first_year = window_start_year - historic_years

    # Fail fast if the oldest required model file is missing.
    if not os.path.exists(os.path.join(input_dir, f"model_{first_year}.nc")):
        raise FileNotFoundError(
            f"File not found for the historic data: {os.path.join(input_dir,f'model_{first_year}.nc')}. Make sure the path is correct and the historic year for the requested year is modelled before calling this function."
        )

    # Aggregate the same calendar window for each historic year.
    historic_agg = {}
    for year in range(1, historic_years + 1):
        args = {
            "input_dir": input_dir,
            # Shift only the year part of the window boundary dates.
            "window_start": f"{window_start_year-year}{current_window_start[4:]}",
            "window_end": f"{window_end_year-year}{current_window_end[4:]}",
            "layer_name": layer_name,
            "agg": agg_window,
        }
        historic_agg[window_start_year - year] = get_range_agg(**args)

    # "quantile"/None: return the raw (non-aggregated) cube. Years may contain
    # different numbers of days, so concatenate instead of stacking — stacking
    # ragged arrays raises ValueError in modern NumPy.
    if agg_history is None or agg_history == "quantile":
        return np.concatenate(list(historic_agg.values()), axis=0)

    # Reduce across years with the requested NumPy reducer.
    reducers = {
        "mean": np.mean,
        "median": np.median,
        "max": np.max,
        "min": np.min,
        "std": np.std,
        "var": np.var,
        "sum": np.sum,
    }
    if agg_history not in reducers:
        raise ValueError(
            f"Invalid aggregation method: {agg_history}. Possible values: mean, median, max, min, std, var, sum, quantile, or None."
        )
    historic_agg_np = np.array([historic_agg[year] for year in historic_agg])
    return reducers[agg_history](historic_agg_np, axis=0)
302 |
+
|
303 |
+
def save(path, array, profile):
    """Write a 2D array to disk as a single-band raster.

    Parameters
    ----------
    path : str
        Destination path for the raster file.
    array : np.ndarray
        Pixel data to write into band 1.
    profile : dict
        Rasterio profile (driver, dtype, transform, ...) for the output.
    """

    # Open for writing with the supplied profile; the context manager
    # guarantees the file handle is flushed and closed.
    dst = rasterio.open(path, "w", **profile)
    with dst:
        dst.write(array, 1)
319 |
+
|
320 |
+
def _crop_raster_to_shapes(raster_path, shapes, label):
    """Clip the raster at *raster_path* in place to *shapes*; report failures.

    Parameters
    ----------
    raster_path : str
        Path of the raster to clip; it is rewritten with the cropped data.
    shapes : list
        Geometries (GeoJSON-like dicts) used as the clipping mask.
    label : str
        Human-readable raster name used in failure messages
        ("historic", "current", "delta", "quantile").
    """
    try:
        with rasterio.open(raster_path) as src:
            out_image, transformed = rasterio.mask.mask(
                src, shapes, crop=True, filled=True
            )
            out_profile = src.profile.copy()
            out_profile.update(
                {
                    "width": out_image.shape[2],
                    "height": out_image.shape[1],
                    "transform": transformed,
                }
            )
        # Overwrite the original file with the cropped image.
        with rasterio.open(raster_path, "w", **out_profile) as dst:
            dst.write(out_image)
    except Exception as e:
        print(f"Failed to crop {label} raster", raster_path)
        print(e)


def analyse(
    input,
    window_start,
    window_end,
    historic_years: int,
    layer: str,
    match_raster: str = None,
    output: str = None,
    agg_history: str = "mean",
    agg_window: str = "mean",
    **kwargs,
) -> Dict[str, str]:
    """Main function to run the script.

    Compares the current window of soil-moisture data against the aggregated
    historic data, computes per-pixel quantiles, and writes four GeoTIFFs
    (historic, current, delta, quantile) plus a CSV of quantiles.

    Parameters
    ----------
    input : str
        Path to the input raster directory.
    window_start : str
        Start date of the window. Format: YYYY-MM-DD.
    window_end : str
        End date of the window. Format: YYYY-MM-DD.
    historic_years : int
        Number of historic years to use for the comparison.
    layer : str
        Soil layer to consider for the comparison.
    match_raster : str
        Path to the match raster. Default: None. If None, the match raster will be
        searched in the et_pp directory based on the input directory.
    output : str
        Path to the output raster. Default: None. If None, the output raster will be
        saved in an "analysis" subdirectory of the input directory.
    agg_history : str
        Aggregation method to use for the historic years.
        Possible values: 'mean', 'median', 'max', 'min', 'std', None. Default: 'mean'.
    agg_window : str
        Aggregation method to use for the window.
        Possible values: 'mean', 'median', 'max', 'min', 'std', None. Default: 'mean'.

    Returns
    -------
    Dict[str, str]
        Dictionary with the path to the output rasters.
    """

    if output is None:
        output = os.path.join(input, "analysis")

    # Create the output directory if it does not exist
    if not os.path.exists(output):
        os.makedirs(output)

    if match_raster is None:
        match_raster = find_match_raster(input)
        print("match_raster is:", match_raster)

    if match_raster is not None:
        print("Found match raster:", match_raster)

        # Look for a GeoTIFF in the same year-month as the window start,
        # progressively widening the search (end month, then whole years).
        files = glob.glob(os.path.join(match_raster, f"{window_start[:7]}*.tif"))
        if len(files) == 0:
            files = glob.glob(os.path.join(match_raster, f"{window_end[:7]}*.tif"))
            if len(files) == 0:
                vprint(
                    1,
                    V,
                    V_IGNORE,
                    Debug=f"Expanding the search for match raster file to find a closer date to {window_start[:5]}...",
                )
                files = glob.glob(
                    os.path.join(match_raster, f"{window_start[:5]}*.tif")
                )
                if len(files) == 0:
                    vprint(
                        1,
                        V,
                        V_IGNORE,
                        Debug=f"Expanding the search further for match raster file to find a closer date to {window_end[:5]}...",
                    )
                    files = glob.glob(
                        os.path.join(match_raster, f"{window_end[:5]}*.tif")
                    )
                    if len(files) == 0:
                        raise FileNotFoundError(
                            f"Could not find any matching raster in {match_raster} for the range of dates given at {window_start} / {window_end}!"
                        )
        print(f"Found {len(files)} matching raster file {files[0]}.")
        match_raster = files[0]

    # Borrow the georeferencing profile from the match raster for all outputs.
    with rasterio.open(match_raster) as src:
        profile = src.profile

    # Aggregated data for the current window.
    current_data = get_range_agg(
        input_dir=input,
        window_start=window_start,
        window_end=window_end,
        agg=agg_window,
        layer_name=layer,
    )

    # Historic data aggregated the same way as the current window.
    historic_data = get_historic_agg(
        input_dir=input,
        historic_years=historic_years,
        current_window_start=window_start,
        current_window_end=window_end,
        agg_window=agg_window,
        agg_history=agg_history,
        layer_name=layer,
    )

    # Non-aggregated historic cube for the per-pixel quantile calculation.
    historic_data_quant = get_historic_agg(
        input_dir=input,
        historic_years=historic_years,
        current_window_start=window_start,
        current_window_end=window_end,
        agg_window=None,
        agg_history=None,
        layer_name=layer,
    )

    # Difference between current and historic aggregates.
    delta = current_data - historic_data

    # Per-pixel quantile of the current value within the historic distribution.
    quantile = current_data.copy()
    print("\nCalculating quantiles...", "\n=========================")
    print("Data shape:", current_data.shape)
    print("Historic data shape:", historic_data_quant.shape)
    print("=========================\n")
    pixel_now = []
    pixel_hist = []
    pixel_quant = []
    for i in range(current_data.shape[0]):
        for j in range(current_data.shape[1]):
            sorted_scores = np.array(sorted(historic_data_quant[:, i, j]))
            # Rank of the current value among the historic values, as a percentage.
            quantile[i, j] = (
                (sorted_scores.searchsorted(current_data[i, j]))
                / sorted_scores.shape[0]
                * 100
            )
            pixel_now.append(current_data[i, j])
            pixel_hist.append(sorted_scores)
            pixel_quant.append(quantile[i, j])

    df_quants = pd.DataFrame(pixel_hist)
    df_quants["pixel_now"] = pixel_now
    df_quants["pixel_quant"] = pixel_quant
    print("shapes are:", len(pixel_now), len(pixel_quant))
    df_quants = df_quants.sort_values(by=["pixel_quant"], ascending=False)
    print(df_quants)
    df_quants.to_csv(
        os.path.join(
            output,
            f"quantiles-{window_start.replace('-','_')}-{window_end.replace('-','_')}-{layer}-w_{agg_window}-h_concat-y_{historic_years}.csv",
        )
    )

    # Search for a shape file ending with .shp
    shape_file = find_shape_file(input)
    print("Shape file:", shape_file)

    # Output raster paths encode the parameters used to produce them.
    historic_raster = os.path.join(
        output,
        f"historic-{window_start.replace('-','_')}-{window_end.replace('-','_')}-{layer}-w_{agg_window}-h_{agg_history}-y_{historic_years}.tif",
    )
    current_raster = os.path.join(
        output,
        f"current-{window_start.replace('-','_')}-{window_end.replace('-','_')}-{layer}-w_{agg_window}.tif",
    )
    delta_raster = os.path.join(
        output,
        f"delta-{window_start.replace('-','_')}-{window_end.replace('-','_')}-{layer}-w_{agg_window}-h_{agg_history}-y_{historic_years}.tif",
    )
    quant_raster = os.path.join(
        output,
        f"quantile-{window_start.replace('-','_')}-{window_end.replace('-','_')}-{layer}-w_{agg_window}-h_concat-y_{historic_years}.tif",
    )

    save(historic_raster, historic_data, profile)
    save(current_raster, current_data, profile)
    save(delta_raster, delta, profile)
    save(quant_raster, quantile, profile)

    # Clip the rasters (in place) to the shape file, if one was found.
    if shape_file is not None:
        print("Found shape file:", shape_file)
        try:
            with fiona.open(shape_file, "r") as shapefile:
                shapes = [feature["geometry"] for feature in shapefile]

            _crop_raster_to_shapes(historic_raster, shapes, "historic")
            _crop_raster_to_shapes(current_raster, shapes, "current")
            _crop_raster_to_shapes(delta_raster, shapes, "delta")
            _crop_raster_to_shapes(quant_raster, shapes, "quantile")

        except Exception as e:
            print("Could not open shape file:", shape_file)
            print(e)

    print("done.")

    return {
        "historic_raster": historic_raster,
        "current_raster": current_raster,
        "delta_raster": delta_raster,
        "quant_raster": quant_raster,
    }
|
606 |
+
|
607 |
+
|
608 |
+
def find_analyses(path):
    """Collect the names of all GeoTIFF analysis outputs in a directory.

    Parameters
    ----------
    path: str
        Path to the directory containing the analysis files

    Returns
    -------
    files: list
        List of analysis file names (entries ending in ".tif")
    """
    tif_names = []
    for entry in os.listdir(path):
        if entry.endswith(".tif"):
            tif_names.append(entry)
    return tif_names
|
623 |
+
|
624 |
+
|
625 |
+
def open_image(path):
    """Open a raster image and return its first band plus coordinate grids.

    Parameters
    ----------
    path: str
        path to the raster image

    Returns
    -------
    band1: np.array
        The raster data (band 1)
    lons: np.array
        The longitude coordinate of every pixel
    lats: np.array
        The latitude coordinate of every pixel
    """
    with rasterio.open(path) as src:
        band1 = src.read(1)
        print("Band1 has shape", band1.shape)
        n_rows, n_cols = band1.shape
        # Build a full grid of (row, col) indices, then map them through the
        # raster's affine transform to geographic coordinates.
        col_idx, row_idx = np.meshgrid(np.arange(n_cols), np.arange(n_rows))
        xs, ys = rasterio.transform.xy(src.transform, row_idx, col_idx)
        lons = np.array(xs)
        lats = np.array(ys)

    return band1, lons, lats
|
653 |
+
|
654 |
+
|
655 |
+
def perform_analysis(
    input,
    window_start,
    window_end,
    historic_years: int,
    layer: str,
    match_raster: str = None,
    output: str = None,
    agg_history: str = "mean",
    agg_window: str = "mean",
    comparison: str = "diff",
    **args,
) -> Dict[str, str]:
    """Perform the analysis.

    Thin wrapper around :func:`analyse` — collects the input parameters and
    forwards them unchanged.

    Parameters
    ----------
    input : str
        path to the input data
    window_start : str
        start date of the window
    window_end : str
        end date of the window
    historic_years : int
        number of years to use for the historic data
    layer : str
        layer to use for the analysis
    match_raster : str, optional
        path to the raster to match the output to, by default None
    output : str, optional
        path to the output file, by default None
    agg_history : str, optional
        aggregation method for the historic data, by default "mean"
    agg_window : str, optional
        aggregation method for the window data, by default "mean"
    comparison : str, optional
        comparison method for the window and historic data, by default "diff"

    Returns
    -------
    files: dict
        Dict of analysis files
    """
    analysis_params = {
        "input": input,
        "window_start": window_start,
        "window_end": window_end,
        "historic_years": historic_years,
        "agg_window": agg_window,
        "agg_history": agg_history,
        "comparison": comparison,
        "layer": layer,
        "output": output,
        "match_raster": match_raster,
    }
    return analyse(**analysis_params)
|
713 |
+
|
714 |
+
|
715 |
+
def layout(WAIT_IMAGE):
    """Build the Dash page layout for the soil-moisture comparison tool.

    Parameters
    ----------
    WAIT_IMAGE : str
        Image source (path or data URI) shown beside the farm-name input as a
        status indicator.

    Returns
    -------
    dash.html.Div
        Root container of the page.
    """
    # Imports are function-local so the module can be imported without
    # pulling in Dash/Plotly at import time.
    import datetime

    import plotly.express as px
    from dash import dcc, html

    today = datetime.datetime.today()

    # Names of the built-in Plotly colour scales, shown in the palette dropdown.
    colorscales = px.colors.named_colorscales()

    layout = html.Div(
        [
            # html.Div(
            #     className="dashapp-header",
            #     children=[
            #         html.Div('Soil Moisture Comparison Tool', className="dashapp-header--title")
            #     ]
            # ),
            # Session-scoped store so the farm name survives page reloads.
            dcc.Store(id="farm-name-session", storage_type="session"),
            # Intro text and the section "A" heading (comparison parameters).
            html.Div(
                [
                    html.P(
                        """This tool will use the produced datacubes to compare the soil moisture of a farm against historic data.
                        Please select the desired comaprison method and dates to make the comparison as in section A.
                        Then choose the visualisation in section B to see the results.""",
                        style={"font-size": "larger"},
                    ),
                    html.Hr(),
                    html.H3("A"),
                ],
                className="col-lg-12",
                style={"padding-top": "1%", "padding-left": "1%"},
            ),
            # Row: farm name input plus status icon (updated by a callback —
            # presumably via the "farm-image" id; confirm against the app's callbacks).
            html.Div(
                [
                    html.Div(
                        [
                            # html.P("Write farm name/ID:"),
                            dcc.Input(
                                id="farm-name",
                                type="text",
                                placeholder="Farm name",
                                style={"width": "80%"},
                            ),
                            html.Img(
                                id="farm-image",
                                src=WAIT_IMAGE,
                                style={"width": "30px", "margin-left": "15px"},
                            ),
                        ],
                        className="col-lg-5",
                        # style = {'padding-top':'1%', 'padding-left':'1%'}
                    ),
                    html.Div(
                        [
                            html.P(),
                        ],
                        className="col-lg-7",
                        # style = {'padding-top':'1%', 'padding-left':'1%'}
                    ),
                ],
                className="row",
                style={"padding-top": "1%", "padding-left": "1%"},
            ),
            # Row: soil layer, number of historic years, and the date window.
            html.Div(
                [
                    html.Div(
                        [
                            html.P("Select soil layer:"),
                            dcc.Dropdown(
                                id="layer-dropdown",
                                options=[
                                    {"label": "SM1", "value": "SM1"},
                                    {"label": "SM2", "value": "SM2"},
                                    {"label": "SM3", "value": "SM3"},
                                    {"label": "SM4", "value": "SM4"},
                                    {"label": "SM5", "value": "SM5"},
                                    {"label": "DD", "value": "DD"},
                                ],
                                value="SM2",
                            ),
                        ],
                        className="col-lg-4",
                        style={"padding": "1%"},
                    ),
                    html.Div(
                        [
                            html.P("Select the historic years to compare against:"),
                            dcc.Dropdown(
                                id="historic-dropdown",
                                # 1–19 years of history to aggregate over.
                                options=[
                                    {"label": year, "value": year}
                                    for year in range(1, 20)
                                ],
                                value=2,
                            ),
                        ],
                        className="col-lg-4",
                        style={"padding": "1%"},
                    ),
                    html.Div(
                        [
                            html.P(
                                "Select the most recent window of dates to analyse:"
                            ),
                            dcc.DatePickerRange(
                                id="window-select",
                                min_date_allowed=datetime.date(2000, 1, 1),
                                max_date_allowed=today.strftime("%Y-%m-%d"),
                                initial_visible_month=datetime.date(2023, 1, 1),
                                clearable=False,
                                display_format="YYYY-MM-DD",
                                start_date_placeholder_text="Start date",
                                end_date_placeholder_text="End date",
                                style={"width": "100%"},
                            ),
                        ],
                        className="col-lg-4",
                        style={"padding": "1%"},
                    ),
                ],
                className="row",
                style={"padding-top": "1%"},
            ),
            # Row: aggregation method dropdowns (window and historic).
            html.Div(
                [
                    html.Div(
                        [
                            html.P("Select window aggregation method:"),
                            dcc.Dropdown(
                                id="w-aggregation-dropdown",
                                options=[
                                    {"label": "Mean", "value": "mean"},
                                    {"label": "Median", "value": "median"},
                                    {"label": "Max", "value": "max"},
                                    {"label": "Min", "value": "min"},
                                    {"label": "Sum", "value": "sum"},
                                    {"label": "std", "value": "std"},
                                    {"label": "var", "value": "var"},
                                ],
                                value="mean",
                            ),
                        ],
                        className="col-lg-6",
                        style={"padding": "1%"},
                    ),
                    html.Div(
                        [
                            html.P("Select historic aggregation method:"),
                            dcc.Dropdown(
                                id="h-aggregation-dropdown",
                                # "quantile" is offered only for the historic
                                # aggregation, matching get_historic_agg's docs.
                                options=[
                                    {"label": "Mean", "value": "mean"},
                                    {"label": "Median", "value": "median"},
                                    {"label": "Max", "value": "max"},
                                    {"label": "Min", "value": "min"},
                                    {"label": "Sum", "value": "sum"},
                                    {"label": "std", "value": "std"},
                                    {"label": "var", "value": "var"},
                                    {"label": "quantile", "value": "quantile"},
                                ],
                                value="mean",
                            ),
                        ],
                        className="col-lg-6",
                        style={"padding": "1%"},
                    ),
                ],
                className="row",
                # style = {'padding-top':'1%'}
            ),
            # Trigger button for generating the comparison images.
            html.Div(
                [
                    html.Button("Generate Images", id="generate-button"),
                    html.Br(),
                    html.Hr(),
                ],
                className="col-lg-12",
                style={"margin-bottom": "1%"},
            ),
            # Section "B" heading (visualisation options).
            html.Div(
                [
                    html.H3("B"),
                ],
                className="col-lg-12",
                style={"padding-top": "1%", "padding-left": "1%"},
            ),
            # Row: visualisation selector (populated by a callback — its
            # options list is empty here) and the colour palette dropdown.
            html.Div(
                [
                    html.Div(
                        [
                            html.P("Select visualisation name:"),
                            dcc.Dropdown(id="visualisation-select"),
                        ],
                        className="col-lg-6",
                        style={"padding": "1%"},
                    ),
                    html.Div(
                        [
                            html.P("Select your palette:"),
                            dcc.Dropdown(
                                id="platter-dropdown",
                                options=colorscales,
                                value="viridis",
                            ),
                        ],
                        className="col-lg-6",
                        style={"padding": "1%"},
                    ),
                ],
                className="row",
                # style = {'padding-top':'1%'}
            ),
            # Results area: the generated figure is rendered into this graph.
            html.Div(
                [
                    html.Hr(),
                    html.H3("Results"),
                    dcc.Graph(id="graph"),
                ],
                className="col-lg-12",
                style={"padding-top": "1%"},
            ),
            # html.Div(
            #     className="dashapp-footer",
            #     children=[
            #         html.Div(f"Copyright @ {today.strftime('%Y')} Sydney Informatics Hub (SIH)", className="dashapp-footer--copyright")
            #     ]
            # ),
        ],
        className="container-fluid",
    )

    return layout
|
libs/utils.py
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
import logging
|
3 |
+
|
4 |
+
|
5 |
+
def setup_logging():
    """Configure the root logger with a timestamped INFO-level format."""
    log_format = "%(asctime)s %(module)s %(levelname)s : %(message)s"
    logging.basicConfig(format=log_format, level=logging.INFO)
|
10 |
+
|
11 |
+
# -*- coding: utf-8 -*-
|
12 |
+
def verbose(value, thresh=1, ignore=None, **outputs):
    """Verbose outputs.

    Prints each keyword output (prefixed by its key) when the message's
    verbosity level is at or below the configured threshold.

    Parameters
    ----------
    value : int
        An integer showing the verbose level of an output. Higher verbose value
        means less important. If the object's or script's initialized verbose
        threshold is equal to or higher than this value, it will print.
    thresh : int
        Verbosity threshold; messages with ``value <= thresh`` are printed.
    ignore : list, optional
        A list of output types to ignore (Debug, Warning, Error, etc.).
        Default None (ignore nothing). A mutable default list is deliberately
        avoided here.
    **outputs : keyword arguments, 'key'='value'
        Outputs with keys. Keys will be also printed. More than one key and
        output pair can be supplied.
    """
    if ignore is None:
        ignore = []
    # Nothing is printed at all when the message level exceeds the threshold.
    if value > thresh:
        return
    for key, content in outputs.items():
        if key not in ignore:
            print("\n> " + str(key) + ":", content)
    # event_log.append([str(datetime.now())+'\n> '+str(output[0])+':',output[1]])
|
model/__init__.py
ADDED
File without changes
|
model/client.py
ADDED
File without changes
|
model/farm.py
ADDED
File without changes
|
model/job.py
ADDED
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
import datetime as dt
|
3 |
+
|
4 |
+
from marshmallow import Schema, fields
|
5 |
+
|
6 |
+
|
7 |
+
class Job(object):
    """A unit of work tracked by the service.

    Parameters
    ----------
    description : str
        Human-readable description of the job.
    amount : number
        Amount associated with the job; stored so TransactionSchema can
        serialize it (the original constructor accepted but dropped it).
    type : str
        Job type label. (Shadows the builtin ``type`` inside ``__init__``;
        kept for interface compatibility.)
    """

    def __init__(self, description, amount, type):
        self.description = description
        # Previously the "amount" argument was silently discarded even though
        # the schema declares an "amount" field; store it.
        self.amount = amount
        self.time_start = 1  # placeholder start time — TODO confirm intended value
        self.created_at = dt.datetime.now()
        self.type = type

    def __repr__(self):
        # Use the actual class name (was hard-coded as "Transaction").
        return "<{cls}(name={self.description!r})>".format(
            cls=self.__class__.__name__, self=self
        )
|
17 |
+
|
18 |
+
class TransactionSchema(Schema):
    """Marshmallow schema for serializing job/transaction records.

    Mirrors the attributes of the Job class in this module.
    """

    # NOTE(review): Job stores created_at as a full datetime, but this field
    # is fields.Date, which drops the time component on dump — confirm that
    # truncation is intended.
    description = fields.Str()
    amount = fields.Number()
    created_at = fields.Date()
    type = fields.Str()
|
model/project.py
ADDED
File without changes
|
register/__init__.py
ADDED
File without changes
|
requirements/requirements-dev-dash.txt
ADDED
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
matplotlib
|
2 |
+
dash
|
3 |
+
dash-bootstrap-components
|
4 |
+
netCDF4
|
5 |
+
tqdm
|
6 |
+
fiona
|
7 |
+
pandas
|
8 |
+
rasterio
|
9 |
+
shapely
|
10 |
+
numpy
|
requirements/requirements-dev-vis.txt
ADDED
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
pillow
|
2 |
+
numpy
|
3 |
+
matplotlib
|
4 |
+
basemap-data
|
5 |
+
basemap-data-hires
|
6 |
+
basemap
|
7 |
+
netCDF4
|
8 |
+
earthengine-api
|
9 |
+
geopandas
|
10 |
+
fiona
|
11 |
+
pandas
|
12 |
+
rasterio
|
13 |
+
shapely
|
14 |
+
tqdm
|
15 |
+
holoviews
|
16 |
+
bokeh
|
17 |
+
datashader
|
18 |
+
panel
|
19 |
+
param
|
20 |
+
dash
|
21 |
+
dash-bootstrap-components
|
22 |
+
flask
|
23 |
+
flask-restful
|
24 |
+
marshmallow
|
25 |
+
Flask-SQLAlchemy
|
26 |
+
flask-bcrypt
|
requirements/requirements-vis-system.txt
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
sudo apt install python3-dev mysql-server default-libmysqlclient-dev build-essential
|
requirements/requuirements-gcp.txt
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
google-api-core
|
2 |
+
google-cloud-shell
|
3 |
+
google-cloud-batch
|
4 |
+
google-cloud-datastore
|
5 |
+
|
scripts/__init__.py
ADDED
File without changes
|
scripts/analyse.py
ADDED
@@ -0,0 +1,468 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/env python
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import argparse
|
4 |
+
import gc
|
5 |
+
import glob
|
6 |
+
import logging
|
7 |
+
import os
|
8 |
+
import traceback
|
9 |
+
from datetime import timedelta
|
10 |
+
from typing import Dict
|
11 |
+
|
12 |
+
import netCDF4
|
13 |
+
import numpy as np
|
14 |
+
import pandas as pd
|
15 |
+
import rasterio
|
16 |
+
|
17 |
+
from libs.utils import setup_logging
|
18 |
+
from libs.utils import verbose as vprint
|
19 |
+
|
20 |
+
setup_logging()
|
21 |
+
log = logging.getLogger(__name__)
|
22 |
+
CONFIG = {}
|
23 |
+
V = 1
|
24 |
+
V_IGNORE = [] # Debug, Warning, Error
|
25 |
+
# print(os.getcwd())
|
26 |
+
|
27 |
+
|
28 |
+
def get_historic_agg(
    input_dir: str,
    historic_years: int,
    current_window_start: str,
    current_window_end: str,
    layer_name: str,
    agg_window: str = "mean",
    agg_history: str = "mean",
) -> np.ndarray:
    """Get the historic aggregate for a given window_start and window_end dates.

    Parameters
    ----------
    input_dir : str
        Path to the directory containing the netcdf files (``model_<year>.nc``).
    historic_years : int
        Number of historic years to consider for the aggregation.
    current_window_start : str
        Start date of the current window. Format: YYYY-MM-DD.
    current_window_end : str
        End date of the current window. Format: YYYY-MM-DD.
    layer_name : str
        Soil layer to consider for the aggregation.
    agg_window : str
        Aggregation method within each yearly window. Default is "mean".
        Possible values: "mean", "median", "max", "min", "std", "var", "sum".
    agg_history : str
        Aggregation method across the historic years. Default is "mean".
        Possible values: "mean", "median", "max", "min", "std", "var", "sum".

    Returns
    -------
    np.ndarray
        Aggregate over the same calendar window in each of the historic years.

    Raises
    ------
    ValueError
        If ``agg_history`` is not a supported aggregation method.
    FileNotFoundError
        If the file for the oldest historic year is not found. Possible solutions:
        - The historic year should be modelled before calling this function.
        - The path to the historic year should be changed.
        - Calculate for a more recent historic year by reducing historic_years value.
    """
    # Reducers applied across the per-year aggregates (axis 0). Validated up
    # front so a bad agg_history fails before any expensive file reads.
    reducers = {
        "mean": np.mean,
        "median": np.median,
        "max": np.max,
        "min": np.min,
        "std": np.std,
        "var": np.var,
        "sum": np.sum,
    }
    if agg_history not in reducers:
        raise ValueError(
            f"Invalid aggregation method: {agg_history}. Possible values: mean, median, max, min, std, var, sum."
        )

    # Years of the current window; the historic windows keep the same
    # month/day suffix (characters from index 4 on, i.e. "-MM-DD").
    window_start_year = pd.to_datetime(current_window_start).year
    window_end_year = pd.to_datetime(current_window_end).year

    # Oldest year needed. Only this file's existence is checked up front.
    # NOTE(review): the loop below reads every year in the range — files for
    # the intermediate years are assumed to exist as well.
    first_year = window_start_year - historic_years
    first_year_path = os.path.join(input_dir, f"model_{first_year}.nc")
    if not os.path.exists(first_year_path):
        raise FileNotFoundError(
            f"File not found for the historic data: {first_year_path}. Make sure the path is correct and the historic year for the requested year is modelled before calling this function."
        )

    # Aggregate the same calendar window for each historic year.
    historic_agg = {}
    for year in range(1, historic_years + 1):
        historic_agg[window_start_year - year] = get_range_agg(
            input_dir=input_dir,
            window_start=f"{window_start_year - year}{current_window_start[4:]}",
            window_end=f"{window_end_year - year}{current_window_end[4:]}",
            layer_name=layer_name,
            agg=agg_window,
        )

    # Stack the per-year rasters and reduce across years.
    historic_agg_np = np.array([historic_agg[year] for year in historic_agg])
    return reducers[agg_history](historic_agg_np, axis=0)
|
116 |
+
|
117 |
+
|
118 |
+
def get_range_agg(
    input_dir: str,
    window_start: str,
    window_end: str,
    layer_name: str,
    agg: str = "mean",
) -> np.ndarray:
    """Aggregate a layer over the [window_start, window_end) date range.

    Parameters
    ----------
    input_dir : str
        Path to the directory containing the netcdf files (``model_<year>.nc``).
    window_start : str
        Start date of the window (inclusive). Format: YYYY-MM-DD.
    window_end : str
        End date of the window (exclusive — one day is subtracted). Format: YYYY-MM-DD.
    layer_name : str
        Soil layer to aggregate.
    agg : str
        Aggregation method. Possible values: mean, median, max, min, std, var, sum.

    Returns
    -------
    np.ndarray
        Raster aggregated over the day axis for the given range.

    Raises
    ------
    ValueError
        If ``agg`` is not a supported aggregation method.
    """
    # Reducers applied over the day axis (axis 0). Validated before any file
    # I/O so a bad agg value fails fast.
    reducers = {
        "mean": np.mean,
        "median": np.median,
        "max": np.max,
        "min": np.min,
        "std": np.std,
        "var": np.var,
        "sum": np.sum,
    }
    if agg not in reducers:
        raise ValueError(
            f"agg should be one of mean, median, max, min, std, var, sum. {agg} was provided."
        )

    # Build the list of dates in the range and group their (0-based) day-of-year
    # indices by calendar year, since the data is stored one file per year.
    dates = pd.DataFrame(
        pd.date_range(
            pd.to_datetime(window_start),
            pd.to_datetime(window_end) - timedelta(days=1),
            freq="d",
        ),
        columns=["date"],
    )
    dates["dayofyear"] = dates["date"].dt.dayofyear - 1  # 0-based index into the year file
    dates["year"] = dates["date"].dt.year

    yearly_dates = dates.groupby("year")["dayofyear"].apply(list).to_dict()

    data_l = []
    # For each year, read the requested days of layer_name from that year's file.
    for year, days in yearly_dates.items():
        nc_y = netCDF4.Dataset(os.path.join(input_dir, f"model_{year}.nc"))
        vprint(
            1,
            V,
            V_IGNORE,
            Debug=f"getting data for year: {year} from layer: {layer_name}...",
        )
        data = nc_y.variables[layer_name][:, :, :]
        data_l.append(data[days, :, :])
        nc_y.close()
        del data
        gc.collect()  # the per-year arrays can be large; free them eagerly

    # Concatenate all years along the day axis and reduce.
    data_concat = np.concatenate(data_l, axis=0)
    data_agg = reducers[agg](data_concat, axis=0)
    print("done.")
    return data_agg
|
216 |
+
|
217 |
+
|
218 |
+
def save(path, array, profile):
    """Write a single-band array to a raster file.

    Parameters
    ----------
    path : str
        Destination path for the output raster.
    array : np.ndarray
        Data to write into band 1.
    profile : dict
        Rasterio profile (driver, dtype, CRS, transform, ...) of the output.
    """
    dst = rasterio.open(path, "w", **profile)
    with dst:
        # Band indices are 1-based in rasterio.
        dst.write(array, 1)
|
233 |
+
|
234 |
+
|
235 |
+
def analyse(
    input,
    window_start,
    window_end,
    historic_years: int,
    layer: str,
    match_raster: str = None,
    output: str = None,
    agg_history: str = "mean",
    agg_window: str = "mean",
    comparison: str = "diff",
) -> Dict[str, str]:
    """Compare an aggregated soil-moisture window against its historic aggregate.

    Parameters
    ----------
    input : str
        Path to the directory containing the model netcdf files.
    window_start : str
        Start date of the window. Format: YYYY-MM-DD.
    window_end : str
        End date of the window. Format: YYYY-MM-DD.
    historic_years : int
        Number of historic years to use for the comparison.
    layer : str
        Soil layer to consider for the comparison.
    match_raster : str
        Directory of GeoTIFFs to search for a georeference template. Default:
        None, in which case the et_pp directory next to ``input`` is searched.
    output : str
        Directory for the output rasters. Default: None, in which case an
        "analysis" sub-directory of ``input`` is used.
    agg_history : str
        Aggregation method across the historic years. Possible values: mean,
        median, max, min, std, var, sum. Default: mean.
    agg_window : str
        Aggregation method within a window. Possible values: mean, median,
        max, min, std, var, sum. Default: mean.
    comparison : str
        Comparison method. Only "diff" is currently implemented.

    Returns
    -------
    Dict[str, str]
        Paths of the written rasters: historic_raster, current_raster, delta_raster.

    Raises
    ------
    FileNotFoundError
        If no raster matching the window dates is found under match_raster.
    NotImplementedError
        If ``comparison`` is not "diff".
    """
    if output is None:
        output = os.path.join(input, "analysis")
    # Create the output directory if it does not exist.
    os.makedirs(output, exist_ok=True)

    if match_raster is None:
        match_raster = os.path.join(os.path.dirname(input), "et_pp")

    # Find a georeferenced raster whose profile the outputs will copy.
    # Search by start month (YYYY-MM) first, then end month, then widen to the
    # start/end year ("YYYY-") prefixes.
    files = glob.glob(os.path.join(match_raster, f"{window_start[:7]}*.tif"))
    if len(files) == 0:
        files = glob.glob(os.path.join(match_raster, f"{window_end[:7]}*.tif"))
    if len(files) == 0:
        vprint(
            1,
            V,
            V_IGNORE,
            Debug=f"Expanding the search for match raster file to find a closer date to {window_start[:5]}...",
        )
        files = glob.glob(os.path.join(match_raster, f"{window_start[:5]}*.tif"))
    if len(files) == 0:
        vprint(
            1,
            V,
            V_IGNORE,
            Debug=f"Expanding the search further for match raster file to find a closer date to {window_end[:5]}...",
        )
        files = glob.glob(os.path.join(match_raster, f"{window_end[:5]}*.tif"))
    if len(files) == 0:
        raise FileNotFoundError(
            f"Could not find any matching raster in {match_raster} for the range of dates given at {window_start} / {window_end}!"
        )
    print(f"Found {len(files)} matching raster file {files[0]}.")
    match_raster = files[0]

    # Georeference template for the output rasters.
    with rasterio.open(match_raster) as src:
        profile = src.profile

    # Historic aggregate over the same calendar window in previous years.
    historic_data = get_historic_agg(
        input_dir=input,
        historic_years=historic_years,
        current_window_start=window_start,
        current_window_end=window_end,
        agg_window=agg_window,
        agg_history=agg_history,
        layer_name=layer,
    )
    # Aggregate of the current window itself.
    current_data = get_range_agg(
        input_dir=input,
        window_start=window_start,
        window_end=window_end,
        agg=agg_window,
        layer_name=layer,
    )

    # Compare the two rasters.
    if comparison == "diff":
        delta = current_data - historic_data
    else:
        raise NotImplementedError(
            f"comparison should be diff. {comparison} was provided."
        )

    # Save the rasters. The common filename stamp is built once.
    stamp = f"{window_start.replace('-','_')}-{window_end.replace('-','_')}-{layer}"
    historic_raster = os.path.join(
        output,
        f"historic-{stamp}-w_{agg_window}-h_{agg_history}-y_{historic_years}.tif",
    )
    current_raster = os.path.join(
        output,
        f"current-{stamp}-w_{agg_window}.tif",
    )
    delta_raster = os.path.join(
        output,
        f"delta-{stamp}-w_{agg_window}-h_{agg_history}-y_{historic_years}.tif",
    )

    save(historic_raster, historic_data, profile)
    save(current_raster, current_data, profile)
    save(delta_raster, delta, profile)

    return {
        "historic_raster": historic_raster,
        "current_raster": current_raster,
        "delta_raster": delta_raster,
    }
|
379 |
+
|
380 |
+
|
381 |
+
if __name__ == "__main__":
    # CLI entry point: compare a soil-moisture window against historic years.
    parser = argparse.ArgumentParser(
        description="Compare an aggregated soil moisture window against the aggregate of the same window in historic years.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "-i",
        "--input",
        help="Absolute or relative path to the netcdf data ending with .nc. By default it is set to data.nc",
        default="data.nc",
    )
    parser.add_argument(
        "-l",
        "--layer",
        help="Soil layer to visualise. Default is all. Select between SM1 to SM5 or DD.",
        default="all",
    )
    parser.add_argument("-s", "--window_start", help="Window start date YYYY-MM-DD.")
    parser.add_argument("-e", "--window_end", help="Window end date YYYY-MM-DD.")
    parser.add_argument(
        "-y",
        "--historic_years",
        help="Number of years to go back in time.",
        # Without type=int, command-line values arrive as str and break the
        # year arithmetic in get_historic_agg.
        type=int,
        default=2,
    )
    parser.add_argument(
        "-a",
        "--agg_history",
        help="Aggregation method to use for the historic data. Possible values: mean, median, max, min, std, var, sum.",
        default="mean",
    )
    parser.add_argument(
        "-g",
        "--agg_window",
        help="Aggregation method to use for the window range data. Possible values: mean, median, max, min, std, var, sum.",
        default="mean",
    )
    parser.add_argument(
        "-c",
        "--comparison",
        help="Comparison method to use for the window range data. Currently only diff is implemented.",
        # The previous default ("show") is not implemented by analyse() and
        # always raised NotImplementedError.
        default="diff",
    )
    parser.add_argument(
        "-o",
        "--output",
        help="Output directory to save the output files. Default is the input directory.",
    )
    parser.add_argument(
        "-m",
        "--match_raster",
        help="Raster to match the output to. Default is the input raster.",
    )

    args = parser.parse_args()

    try:
        analyse(
            input=args.input,
            window_start=args.window_start,
            window_end=args.window_end,
            historic_years=args.historic_years,
            agg_window=args.agg_window,
            agg_history=args.agg_history,
            comparison=args.comparison,
            layer=args.layer,
            output=args.output,
            match_raster=args.match_raster,
        )
    except Exception as e:
        # Log through the project's verbose helper, then re-raise with the
        # original traceback preserved.
        vprint(
            0,
            V,
            V_IGNORE,
            Error="Failed to execute the main function:",
            ErrorMessage=e,
        )
        traceback.print_exc()
        raise
|
scripts/animate.py
ADDED
@@ -0,0 +1,214 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/env python
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import argparse
|
4 |
+
import gc
|
5 |
+
import glob
|
6 |
+
import logging
|
7 |
+
import multiprocessing
|
8 |
+
import os
|
9 |
+
import traceback
|
10 |
+
|
11 |
+
import netCDF4
|
12 |
+
import numpy as np
|
13 |
+
import PIL
|
14 |
+
from mpl_toolkits.basemap import Basemap
|
15 |
+
|
16 |
+
from libs.utils import setup_logging
|
17 |
+
from libs.utils import verbose as vprint
|
18 |
+
|
19 |
+
setup_logging()
|
20 |
+
log = logging.getLogger(__name__)
|
21 |
+
CONFIG = {}
|
22 |
+
V = 1
|
23 |
+
V_IGNORE = [] # Debug, Warning, Error
|
24 |
+
# print(os.getcwd())
|
25 |
+
|
26 |
+
|
27 |
+
def vis(files, layers=None, days="all"):
    """Save the visualisation of the soil moisture content for given layers,
    year files, and days.

    Parameters
    ----------
    files : list
        Iterable of the absolute or relative paths to the netcdf data ending with .nc.
    layers : list
        Soil layers to visualise. Default is None, which means
        ["SM1", "SM2", "SM3", "SM4", "SM5", "DD"].
    days : list / str
        Iterable days of year range to visualise. Default is 'all'.

    Returns
    -------
    None
    """
    # Avoid a mutable default argument; resolve the layer list here instead.
    if layers is None:
        layers = ["SM1", "SM2", "SM3", "SM4", "SM5", "DD"]
    # One task per (file, layer) pair; each task renders a full GIF.
    inputs = [(file, layer, days) for file in files for layer in layers]
    num_processes = multiprocessing.cpu_count()
    # Use half the cores: each worker holds a full year of data in memory.
    with multiprocessing.Pool(processes=max(1, int(num_processes / 2))) as pool:
        for result in pool.imap_unordered(vis_process, inputs):
            print(f"Result file: {result}", flush=True)
|
53 |
+
|
54 |
+
|
55 |
+
def vis_process(input_nc_layer_days):
    """Save the visualisation of the soil moisture content for a given layer,
    year file, and days, then assemble the frames into a GIF.

    Designed to run inside a multiprocessing worker, so it takes a single
    tuple argument and imports matplotlib locally (per process).

    Parameters
    ----------
    input_nc_layer_days : tuple
        (input_nc, layer, days) where:
        input_nc : str
            Absolute or relative path to the netcdf data ending with .nc.
        layer : str
            Soil layer to visualise.
        days : list / str
            Iterable of 0-based days of year to visualise, or "all".

    Returns
    -------
    str
        Path of the GIF written next to the input file
        (``<input_nc without .nc>_<layer>.gif``).
    """
    # Imported here so each worker process gets its own matplotlib state.
    import matplotlib
    import matplotlib.pyplot as plt

    input_nc, layer, days = input_nc_layer_days
    nc = netCDF4.Dataset(input_nc)
    # ref = nc.variables["spatial_ref"]
    # vprint(1, V, V_IGNORE, Debug=ref)
    lons = nc.variables["x"][:]
    lats = nc.variables["y"][:]

    # Mercator basemap padded slightly beyond the data's bounding box.
    mp = Basemap(
        projection="merc",
        llcrnrlon=lons.min() - 0.02,  # lower-left longitude
        llcrnrlat=lats.min() - 0.02,  # lower-left latitude
        urcrnrlon=lons.max() + 0.02,  # upper-right longitude
        urcrnrlat=lats.max() + 0.02,  # upper-right latitude
        resolution="i",
    )
    lon, lat = np.meshgrid(lons, lats)  # this converts coordinates into 2D array
    x, y = mp(lon, lat)  # project lon/lat onto map coordinates

    layer_data = nc.variables[layer][:]
    if days == "all":
        # One frame per day available in the file.
        days = np.arange(0, layer_data.shape[0])

    # Generate daily images into the .tmp scratch directory.
    # NOTE(review): assumes .tmp already exists relative to the CWD — confirm.
    for i in days:
        day = i + 1  # 1-based day number used in titles and filenames
        matplotlib.use("Agg")  # headless backend; safe in worker processes
        plt.figure(figsize=(6, 8))  # figure size
        c_scheme = mp.pcolor(x, y, np.squeeze(layer_data[i, :, :]), cmap="jet")
        # mp.etopo()
        mp.shadedrelief()
        mp.drawcoastlines()
        mp.drawstates()
        mp.drawcountries()

        mp.colorbar(c_scheme, location="right", pad="10%")

        plt.title("Soil moisture content for day " + str(day) + " of the year")
        # Fix the colour scale across frames so the animation is comparable.
        plt.clim(layer_data.min(), layer_data.max())

        plt.savefig(f".tmp/{layer}_{str(day)}.jpg")
        plt.clf()
        plt.close("all")

    # Generate gif animation from the saved frames.
    image_frames = []  # frames collected in day order
    for day in days:
        new_fram = PIL.Image.open(f".tmp/{layer}_{str(day + 1)}.jpg")
        image_frames.append(new_fram)
    o_name = input_nc.replace(".nc", f"_{layer}.gif")
    image_frames[0].save(
        o_name,
        format="GIF",
        append_images=image_frames[1:],
        save_all=True,
        duration=150,
        loop=0,
    )

    # Drop every large local reference before collecting, to keep the
    # long-lived worker process's memory footprint down.
    del (
        nc,
        layer_data,
        mp,
        image_frames,
        c_scheme,
        plt,
        new_fram,
        lon,
        lat,
        x,
        y,
        lons,
        lats,
        day,
        input_nc,
        layer,
        days,
        i,
    )
    gc.collect()
    return o_name
|
156 |
+
|
157 |
+
|
158 |
+
if __name__ == "__main__":
    # CLI entry point: render daily soil-moisture images and one GIF per layer.
    parser = argparse.ArgumentParser(
        description="Animate soil moisture layers from netcdf model output as GIF files.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "-i",
        "--input",
        help="Absolute or relative path to the netcdf data ending with .nc. By default it is set to data.nc",
        default="data.nc",
    )
    parser.add_argument(
        "-l",
        "--layer",
        help="Soil layer to visualise. Default is all. Select between SM1 to SM5 or DD.",
        default="all",
    )
    parser.add_argument(
        "-d",
        "--days",
        help="Comma-separated 0-based days of year to visualise. Default is all.",
        default="all",
    )

    args = parser.parse_args()

    try:
        # Resolve the input: a directory means every .nc file inside it.
        if os.path.isdir(args.input):
            files = glob.glob(os.path.join(args.input, "*.nc"))
        else:
            files = [args.input]

        # Resolve the layers.
        if args.layer == "all":
            layers = ["SM1", "SM2", "SM3", "SM4", "SM5", "DD"]
        else:
            layers = str(args.layer).split(",")

        # Resolve the days. Values must be converted to int: they are used as
        # indices into the day axis of the netcdf arrays in vis_process.
        if args.days == "all":
            days = "all"
        else:
            days = [int(d) for d in str(args.days).split(",")]

        vis(files, layers, days)
    except Exception as e:
        # Log through the project's verbose helper, then re-raise with the
        # original traceback preserved.
        vprint(
            0,
            V,
            V_IGNORE,
            Error="Failed to execute the main function:",
            ErrorMessage=e,
        )
        traceback.print_exc()
        raise
|
scripts/assets/icons/fail.png
ADDED
![]() |
scripts/assets/icons/success.png
ADDED
![]() |
scripts/assets/icons/wait.png
ADDED
![]() |
scripts/display.py
ADDED
@@ -0,0 +1,651 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/env python
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import argparse
|
4 |
+
import datetime
|
5 |
+
import logging
|
6 |
+
import os
|
7 |
+
import traceback
|
8 |
+
from typing import Dict, List
|
9 |
+
|
10 |
+
import numpy as np
|
11 |
+
import plotly.express as px
|
12 |
+
import rasterio
|
13 |
+
from dash import Dash, Input, Output, State, dcc, html
|
14 |
+
from dash.exceptions import PreventUpdate
|
15 |
+
|
16 |
+
from libs.utils import setup_logging
|
17 |
+
from libs.utils import verbose as vprint
|
18 |
+
from scripts.analyse import analyse
|
19 |
+
|
20 |
+
setup_logging()
|
21 |
+
log = logging.getLogger(__name__)
|
22 |
+
CONFIG = {}
|
23 |
+
V = 1
|
24 |
+
V_IGNORE = [] # Debug, Warning, Error
|
25 |
+
|
26 |
+
# ===============================================================================
|
27 |
+
# Soil Moisture Comparison Tool App Layout
|
28 |
+
# ===============================================================================
|
29 |
+
|
30 |
+
colorscales = px.colors.named_colorscales()
|
31 |
+
# external JavaScript files
|
32 |
+
external_scripts = [
|
33 |
+
"https://www.google-analytics.com/analytics.js",
|
34 |
+
{"src": "https://cdn.polyfill.io/v2/polyfill.min.js"},
|
35 |
+
{
|
36 |
+
"src": "https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.10/lodash.core.js",
|
37 |
+
"integrity": "sha256-Qqd/EfdABZUcAxjOkMi8eGEivtdTkh3b65xCZL4qAQA=",
|
38 |
+
"crossorigin": "anonymous",
|
39 |
+
},
|
40 |
+
]
|
41 |
+
|
42 |
+
# external CSS stylesheets
|
43 |
+
external_stylesheets = [
|
44 |
+
"https://codepen.io/chriddyp/pen/bWLwgP.css",
|
45 |
+
{
|
46 |
+
"href": "https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css",
|
47 |
+
"rel": "stylesheet",
|
48 |
+
"integrity": "sha384-MCw98/SFnGE8fJT3GXwEOngsV7Zt27NXFoaoApmYm81iuXoPkFOJwJ8ERdknLPMO",
|
49 |
+
"crossorigin": "anonymous",
|
50 |
+
},
|
51 |
+
]
|
52 |
+
|
53 |
+
|
54 |
+
app = Dash(
|
55 |
+
__name__,
|
56 |
+
external_scripts=external_scripts,
|
57 |
+
external_stylesheets=external_stylesheets,
|
58 |
+
title="Soil Moisture Comparison Tool",
|
59 |
+
update_title="Loading the tool...",
|
60 |
+
)
|
61 |
+
|
62 |
+
# farm_name = "Arawa"
|
63 |
+
# layer = "SM2"
|
64 |
+
today = datetime.datetime.today()
|
65 |
+
time_delta = datetime.timedelta(days=20)
|
66 |
+
|
67 |
+
FAIL_IMAGE = app.get_asset_url("icons/fail.png")
|
68 |
+
SUCCESS_IMAGE = app.get_asset_url("icons/success.png")
|
69 |
+
WAIT_IMAGE = app.get_asset_url("icons/wait.png")
|
70 |
+
|
71 |
+
current_working_directory = os.getcwd()
|
72 |
+
app.index_template = os.path.join(current_working_directory, "templates", "index.html")
|
73 |
+
# app.index_string = """
|
74 |
+
# <!DOCTYPE html>
|
75 |
+
# <html>
|
76 |
+
# <head>
|
77 |
+
# {%metas%}
|
78 |
+
# <title>{%title%}</title>
|
79 |
+
# {%favicon%}
|
80 |
+
# {%css%}
|
81 |
+
# </head>
|
82 |
+
# <body>
|
83 |
+
# <div class="col-12">
|
84 |
+
# <br>
|
85 |
+
# <h2>Soil Moisture Comparison Tool</h2>
|
86 |
+
# <br>
|
87 |
+
# <hr>
|
88 |
+
# </div>
|
89 |
+
# {%app_entry%}
|
90 |
+
# <footer>
|
91 |
+
# {%config%}
|
92 |
+
# {%scripts%}
|
93 |
+
# {%renderer%}
|
94 |
+
# </footer>
|
95 |
+
# <div class="col-12">
|
96 |
+
# <hr>
|
97 |
+
# <br>
|
98 |
+
# Copyright @ 2023 Sydney Informatics Hub (SIH)
|
99 |
+
# <br>
|
100 |
+
# </div>
|
101 |
+
# </body>
|
102 |
+
# </html>
|
103 |
+
# """
|
104 |
+
|
105 |
+
app.layout = html.Div(
|
106 |
+
[
|
107 |
+
# html.Div(
|
108 |
+
# className="app-header",
|
109 |
+
# children=[
|
110 |
+
# html.Div('Soil Moisture Comparison Tool', className="app-header--title")
|
111 |
+
# ]
|
112 |
+
# ),
|
113 |
+
dcc.Store(id="farm-name-session", storage_type="session"),
|
114 |
+
html.Div(
|
115 |
+
[
|
116 |
+
html.P(
|
117 |
+
"""This tool will use the produced datacubes to compare the soil moisture of a farm against historic data.
|
118 |
+
Please select the desired comaprison method and dates to make the comparison as in section A.
|
119 |
+
Then choose the visualisation in section B to see the results.""",
|
120 |
+
style={"font-size": "larger"},
|
121 |
+
),
|
122 |
+
html.Hr(),
|
123 |
+
html.H3("A"),
|
124 |
+
],
|
125 |
+
className="col-lg-12",
|
126 |
+
style={"padding-top": "1%", "padding-left": "1%"},
|
127 |
+
),
|
128 |
+
html.Div(
|
129 |
+
[
|
130 |
+
html.Div(
|
131 |
+
[
|
132 |
+
# html.P("Write farm name/ID:"),
|
133 |
+
dcc.Input(
|
134 |
+
id="farm-name",
|
135 |
+
type="text",
|
136 |
+
placeholder="Farm name",
|
137 |
+
style={"width": "80%"},
|
138 |
+
),
|
139 |
+
html.Img(
|
140 |
+
id="farm-image",
|
141 |
+
src=WAIT_IMAGE,
|
142 |
+
style={"width": "30px", "margin-left": "15px"},
|
143 |
+
),
|
144 |
+
],
|
145 |
+
className="col-lg-5",
|
146 |
+
# style = {'padding-top':'1%', 'padding-left':'1%'}
|
147 |
+
),
|
148 |
+
html.Div(
|
149 |
+
[
|
150 |
+
html.P(),
|
151 |
+
],
|
152 |
+
className="col-lg-7",
|
153 |
+
# style = {'padding-top':'1%', 'padding-left':'1%'}
|
154 |
+
),
|
155 |
+
],
|
156 |
+
className="row",
|
157 |
+
style={"padding-top": "1%", "padding-left": "1%"},
|
158 |
+
),
|
159 |
+
html.Div(
|
160 |
+
[
|
161 |
+
html.Div(
|
162 |
+
[
|
163 |
+
html.P("Select soil layer:"),
|
164 |
+
dcc.Dropdown(
|
165 |
+
id="layer-dropdown",
|
166 |
+
options=[
|
167 |
+
{"label": "SM1", "value": "SM1"},
|
168 |
+
{"label": "SM2", "value": "SM2"},
|
169 |
+
{"label": "SM3", "value": "SM3"},
|
170 |
+
{"label": "SM4", "value": "SM4"},
|
171 |
+
{"label": "SM5", "value": "SM5"},
|
172 |
+
{"label": "DD", "value": "DD"},
|
173 |
+
],
|
174 |
+
value="SM2",
|
175 |
+
),
|
176 |
+
],
|
177 |
+
className="col-lg-4",
|
178 |
+
style={"padding": "1%"},
|
179 |
+
),
|
180 |
+
html.Div(
|
181 |
+
[
|
182 |
+
html.P("Select the historic years to compare against:"),
|
183 |
+
dcc.Dropdown(
|
184 |
+
id="historic-dropdown",
|
185 |
+
options=[
|
186 |
+
{"label": year, "value": year} for year in range(1, 20)
|
187 |
+
],
|
188 |
+
value=2,
|
189 |
+
),
|
190 |
+
],
|
191 |
+
className="col-lg-4",
|
192 |
+
style={"padding": "1%"},
|
193 |
+
),
|
194 |
+
html.Div(
|
195 |
+
[
|
196 |
+
html.P("Select the most recent window of dates to analyse:"),
|
197 |
+
dcc.DatePickerRange(
|
198 |
+
id="window-select",
|
199 |
+
min_date_allowed=datetime.date(2000, 1, 1),
|
200 |
+
max_date_allowed=today.strftime("%Y-%m-%d"),
|
201 |
+
initial_visible_month=datetime.date(2023, 1, 1),
|
202 |
+
clearable=False,
|
203 |
+
display_format="YYYY-MM-DD",
|
204 |
+
start_date_placeholder_text="Start date",
|
205 |
+
end_date_placeholder_text="End date",
|
206 |
+
style={"width": "100%"},
|
207 |
+
),
|
208 |
+
],
|
209 |
+
className="col-lg-4",
|
210 |
+
style={"padding": "1%"},
|
211 |
+
),
|
212 |
+
],
|
213 |
+
className="row",
|
214 |
+
style={"padding-top": "1%"},
|
215 |
+
),
|
216 |
+
html.Div(
|
217 |
+
[
|
218 |
+
html.Div(
|
219 |
+
[
|
220 |
+
html.P("Select window aggregation method:"),
|
221 |
+
dcc.Dropdown(
|
222 |
+
id="w-aggregation-dropdown",
|
223 |
+
options=[
|
224 |
+
{"label": "Mean", "value": "mean"},
|
225 |
+
{"label": "Median", "value": "median"},
|
226 |
+
{"label": "Max", "value": "max"},
|
227 |
+
{"label": "Min", "value": "min"},
|
228 |
+
{"label": "Sum", "value": "sum"},
|
229 |
+
{"label": "std", "value": "std"},
|
230 |
+
{"label": "var", "value": "var"},
|
231 |
+
],
|
232 |
+
value="mean",
|
233 |
+
),
|
234 |
+
],
|
235 |
+
className="col-lg-6",
|
236 |
+
style={"padding": "1%"},
|
237 |
+
),
|
238 |
+
html.Div(
|
239 |
+
[
|
240 |
+
html.P("Select historic aggregation method:"),
|
241 |
+
dcc.Dropdown(
|
242 |
+
id="h-aggregation-dropdown",
|
243 |
+
options=[
|
244 |
+
{"label": "Mean", "value": "mean"},
|
245 |
+
{"label": "Median", "value": "median"},
|
246 |
+
{"label": "Max", "value": "max"},
|
247 |
+
{"label": "Min", "value": "min"},
|
248 |
+
{"label": "Sum", "value": "sum"},
|
249 |
+
{"label": "std", "value": "std"},
|
250 |
+
{"label": "var", "value": "var"},
|
251 |
+
],
|
252 |
+
value="mean",
|
253 |
+
),
|
254 |
+
],
|
255 |
+
className="col-lg-6",
|
256 |
+
style={"padding": "1%"},
|
257 |
+
),
|
258 |
+
],
|
259 |
+
className="row",
|
260 |
+
# style = {'padding-top':'1%'}
|
261 |
+
),
|
262 |
+
html.Div(
|
263 |
+
[
|
264 |
+
html.Button("Generate Images", id="generate-button"),
|
265 |
+
html.Br(),
|
266 |
+
html.Hr(),
|
267 |
+
],
|
268 |
+
className="col-lg-12",
|
269 |
+
style={"margin-bottom": "1%"},
|
270 |
+
),
|
271 |
+
html.Div(
|
272 |
+
[
|
273 |
+
html.H3("B"),
|
274 |
+
],
|
275 |
+
className="col-lg-12",
|
276 |
+
style={"padding-top": "1%", "padding-left": "1%"},
|
277 |
+
),
|
278 |
+
html.Div(
|
279 |
+
[
|
280 |
+
html.Div(
|
281 |
+
[
|
282 |
+
html.P("Select visualisation name:"),
|
283 |
+
dcc.Dropdown(id="visualisation-select"),
|
284 |
+
],
|
285 |
+
className="col-lg-6",
|
286 |
+
style={"padding": "1%"},
|
287 |
+
),
|
288 |
+
html.Div(
|
289 |
+
[
|
290 |
+
html.P("Select your palette:"),
|
291 |
+
dcc.Dropdown(
|
292 |
+
id="platter-dropdown", options=colorscales, value="viridis"
|
293 |
+
),
|
294 |
+
],
|
295 |
+
className="col-lg-6",
|
296 |
+
style={"padding": "1%"},
|
297 |
+
),
|
298 |
+
],
|
299 |
+
className="row",
|
300 |
+
# style = {'padding-top':'1%'}
|
301 |
+
),
|
302 |
+
html.Div(
|
303 |
+
[
|
304 |
+
html.Hr(),
|
305 |
+
html.H3("Results"),
|
306 |
+
dcc.Graph(id="graph"),
|
307 |
+
],
|
308 |
+
className="col-lg-12",
|
309 |
+
style={"padding-top": "1%"},
|
310 |
+
),
|
311 |
+
# html.Div(
|
312 |
+
# className="app-footer",
|
313 |
+
# children=[
|
314 |
+
# html.Div(f"Copyright @ {today.strftime('%Y')} Sydney Informatics Hub (SIH)", className="app-footer--copyright")
|
315 |
+
# ]
|
316 |
+
# ),
|
317 |
+
],
|
318 |
+
className="container-fluid",
|
319 |
+
)
|
320 |
+
|
321 |
+
# ==================================================================================================
|
322 |
+
# Functions
|
323 |
+
# ==================================================================================================
|
324 |
+
|
325 |
+
|
326 |
+
def find_analyses(path):
    """List the GeoTIFF analysis products found in a directory.

    Parameters
    ----------
    path: str
        Path to the directory containing the analysis files

    Returns
    -------
    files: list
        Names of the ``*.tif`` files found directly inside ``path``
        (order follows ``os.listdir``).
    """
    tif_files = []
    for entry in os.listdir(path):
        if entry.endswith(".tif"):
            tif_files.append(entry)
    return tif_files
|
341 |
+
|
342 |
+
|
343 |
+
def open_image(path):
    """Open a raster image and return the data and coordinates.

    Only the first band of the raster is read.

    Parameters
    ----------
    path: str
        path to the raster image

    Returns
    -------
    band1: np.array
        The raster data
    lons: np.array
        The longitude coordinates
    lats: np.array
        The latitude coordinates
    """
    with rasterio.open(path) as src:
        band1 = src.read(1)
        print("Band1 has shape", band1.shape)
        height = band1.shape[0]
        width = band1.shape[1]
        # Build a full (row, col) pixel index grid, then map every pixel to
        # geographic coordinates through the raster's affine transform.
        cols, rows = np.meshgrid(np.arange(width), np.arange(height))
        xs, ys = rasterio.transform.xy(src.transform, rows, cols)
        # xs/ys come back as nested sequences; convert to 2-D arrays that
        # match band1's shape so callers can slice axis vectors from them.
        lons = np.array(xs)
        lats = np.array(ys)

    return band1, lons, lats
|
371 |
+
|
372 |
+
|
373 |
+
def perform_analysis(
    input,
    window_start,
    window_end,
    historic_years: int,
    layer: str,
    match_raster: str = None,
    output: str = None,
    agg_history: str = "mean",
    agg_window: str = "mean",
    comparison: str = "diff",
    **args,
) -> Dict[str, str]:
    """Thin wrapper that forwards its arguments to the analysis module.

    Parameters
    ----------
    input : str
        path to the input data
    window_start : str
        start date of the window
    window_end : str
        end date of the window
    historic_years : int
        number of years to use for the historic data
    layer : str
        layer to use for the analysis
    match_raster : str, optional
        path to the raster to match the output to, by default None
    output : str, optional
        path to the output file, by default None
    agg_history : str, optional
        aggregation method for the historic data, by default "mean"
    agg_window : str, optional
        aggregation method for the window data, by default "mean"
    comparison : str, optional
        comparison method for the window and historic data, by default "diff"

    Returns
    -------
    files: dict
        Dict of analysis files produced by :func:`analyse`.
    """
    # Collect everything analyse() needs in one place; any extra **args are
    # accepted for caller convenience but deliberately not forwarded.
    analysis_kwargs = {
        "input": input,
        "window_start": window_start,
        "window_end": window_end,
        "historic_years": historic_years,
        "agg_window": agg_window,
        "agg_history": agg_history,
        "comparison": comparison,
        "layer": layer,
        "output": output,
        "match_raster": match_raster,
    }
    return analyse(**analysis_kwargs)
|
431 |
+
|
432 |
+
|
433 |
+
# ====================================================================================================
|
434 |
+
# Callbacks
|
435 |
+
# ====================================================================================================
|
436 |
+
|
437 |
+
|
438 |
+
@app.callback(
    [
        Output("farm-name-session", "data"),
        Output("farm-image", "src"),
    ],
    [Input("farm-name", "value"), State("farm-name-session", "data")],
)
def update_session(farm_name, session):
    """Validate the typed farm name and persist it in the browser session.

    The farm is accepted only when its data directory (``INPUT`` template
    filled with the farm name) exists on disk; the status icon next to the
    input reflects the outcome.

    Parameters
    ----------
    farm_name : str or None
        Current value of the farm-name text input.
    session : str or None
        Previously stored session value (unused; kept for the callback
        signature defined by the decorator).

    Returns
    -------
    tuple(str, str)
        The new session value ("" when invalid/empty) and the icon URL.
    """
    # BUG FIX: the original pre-assigned `session = farm_name` before the
    # branch, but every branch overwrites it -- dead code removed.
    if farm_name is None or farm_name == "":
        # Nothing typed yet: clear the session and show the "waiting" icon.
        session = ""
        image = WAIT_IMAGE
    else:
        print(f"Getting some data about farm: {farm_name}")

        # Only accept the farm if its data directory actually exists.
        # NOTE(review): INPUT is assigned only inside the __main__ block, so
        # this callback fails if the module is imported by a WSGI server
        # without running main -- confirm the deployment path.
        real_path = INPUT.format(farm_name)
        print(f"Checking {real_path}")
        if os.path.exists(real_path):
            session = farm_name
            image = SUCCESS_IMAGE
        else:
            session = ""
            image = FAIL_IMAGE

    print(f"\n\nSession updated to {session}")
    print(f"Image updated to {image}\n\n")

    return session, image
|
467 |
+
|
468 |
+
|
469 |
+
@app.callback(
    Output("farm-name", "value"),
    Input("farm-name-session", "modified_timestamp"),
    State("farm-name-session", "data"),
)
def display_name_from_session(timestamp, name):
    """Restore the farm-name input from the value stored in the session.

    Parameters
    ----------
    timestamp : int or None
        ``modified_timestamp`` of the dcc.Store; None means the store has
        never been written.
    name : str or None
        The stored farm name.

    Returns
    -------
    str
        The stored name, or "" when the store was never modified.
    """
    print(f"Updating the farm name from the session: {name}")
    return name if timestamp is not None else ""
|
480 |
+
|
481 |
+
|
482 |
+
@app.callback(
    Output("visualisation-select", "options"),
    # Input("farm-name", "value"),
    Input("layer-dropdown", "value"),
    Input("window-select", "start_date"),
    Input("window-select", "end_date"),
    Input("historic-dropdown", "value"),
    Input("w-aggregation-dropdown", "value"),
    Input("h-aggregation-dropdown", "value"),
    Input("generate-button", "n_clicks"),
    State("farm-name-session", "data"),
)
def get_analysis(
    layer, window_start, window_end, historic_years, w_agg, h_agg, n_clicks, farm_name
) -> List[Dict[str, str]]:
    """Get the analysis files and return them as dropdown options.

    Parameters
    ----------
    layer : str
        layer to use for the analysis
    window_start : str
        start date of the window
    window_end : str
        end date of the window
    historic_years : int
        number of years to use for the historic data
    w_agg : str
        aggregation method for the window data
    h_agg : str
        aggregation method for the historic data
    n_clicks : int
        number of times the generate button has been clicked
    farm_name : str
        validated farm name from the session store

    Returns
    -------
    options : list
        list of {"label": ..., "value": path} dicts for the
        visualisation-select dropdown
    """
    print("\nAnalysis callback triggered")

    if n_clicks == 0 or n_clicks is None:
        raise PreventUpdate
    if not farm_name:
        # No validated farm in the session yet -- nothing to analyse.
        raise PreventUpdate

    # BUG FIX: previously a hard-coded "/home/sahand/Data/..." path was used
    # here, ignoring the --input template that update_session validates
    # against. Use the same INPUT template for consistency.
    path = INPUT.format(farm_name)
    print(f"\nPath: {path}\n")

    files = perform_analysis(
        input=path,
        window_start=window_start,
        window_end=window_end,
        historic_years=historic_years,
        layer=layer,
        agg_window=w_agg,
        agg_history=h_agg,
        comparison="diff",
        output=None,
        match_raster=None,
    )
    print(
        f"n_clicks: {n_clicks}\n"
        + f"window_start: {window_start}\n"
        + f"window_end: {window_end}\n"
        + f"historic_years: {historic_years}\n"
        + f"layer: {layer}\n"
        + f"agg_window: {w_agg}\n"
        + f"agg_history: {h_agg}\n"
        + "comparison: 'diff'\n"
    )
    print(files)

    # Turn "delta-2022-12-20-SM2.tif" style file names into readable labels
    # ("Delta 2022 12 20 sm2") while keeping the full path as the value.
    options = []
    for key in files:
        file_path = files[key]
        stem = file_path.split("/")[-1].split(".")[0]
        label = " ".join(stem.split("-")).capitalize()
        options.append({"label": label, "value": file_path})

    return options
|
568 |
+
|
569 |
+
|
570 |
+
@app.callback(
    Output("graph", "figure"),
    Input("visualisation-select", "value"),
    Input("platter-dropdown", "value"),
)
def change_colorscale(file, palette):
    """Display the selected visualisation with the chosen colorscale.

    Parameters
    ----------
    file : str or None
        path to the visualisation file (None until one is selected)
    palette : str
        name of the colorscale to use

    Returns
    -------
    fig : plotly.graph_objects.Figure
        plotly figure object
    """
    # BUG FIX: this callback fires on page load before a visualisation is
    # selected; `file` is then None and rasterio.open(None) would raise.
    if not file:
        raise PreventUpdate

    band1, lons_a, lats_a = open_image(file)

    # The coordinate grids are 2-D; px.imshow wants 1-D axis vectors, so
    # take one column of latitudes and one row of longitudes.
    lats = lats_a[:, 0]
    lons = lons_a[0, :]

    print(lons.shape, lons)
    print(lats.shape, lats)
    print(band1.shape, band1)

    fig = px.imshow(band1, x=lons, y=lats, color_continuous_scale=palette)
    # Attach per-pixel (value, lat, lon) so the hover tooltip can show all
    # three without relying on axis interpolation.
    fig.update(
        data=[
            {
                "customdata": np.stack((band1, lats_a, lons_a), axis=-1),
                "hovertemplate": "<b>SM</b>: %{customdata[0]}<br>"
                + "<b>Lat</b>: %{customdata[1]}<br>"
                + "<b>Lon</b>: %{customdata[2]}<br>"
                + "<extra></extra>",
            }
        ]
    )
    print("Render successful")
    return fig
|
616 |
+
|
617 |
+
|
618 |
+
# ==============================================================================
|
619 |
+
# Main
|
620 |
+
# ==============================================================================
|
621 |
+
|
622 |
+
if __name__ == "__main__":
    # Load Configs
    # NOTE(review): this description looks copy-pasted from a rainfall
    # download tool; this script serves the soil-moisture comparison app --
    # confirm and update the wording.
    parser = argparse.ArgumentParser(
        description="Download rainfall data from Google Earth Engine for a range of dates.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "-i",
        "--input",
        help="Absolute or relative path to the netcdf data directory for each farm. Should be in this format: '/path/to/farm/{}/soilwatermodel'",
        default=os.path.join(
            os.path.expanduser("~"), "Data/results_default/{}/soilwatermodel"
        ),
    )

    args = parser.parse_args()
    # Module-level template consumed by the callbacks (e.g. update_session).
    INPUT = args.input

    try:
        app.run_server(debug=True)
    except Exception as e:
        # Report the failure through the project's verbose printer, dump the
        # traceback, then re-raise so the process exits non-zero.
        vprint(
            0,
            V,
            V_IGNORE,
            Error="Failed to execute the main function:",
            ErrorMessage=e,
        )
        traceback.print_exc()
        raise e
|
scripts/sankey.py
ADDED
@@ -0,0 +1,185 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
# import json
|
3 |
+
# import urllib
|
4 |
+
|
5 |
+
import plotly.graph_objects as go
|
6 |
+
from dash import Dash, Input, Output, dcc, html
|
7 |
+
from dash.exceptions import PreventUpdate
|
8 |
+
|
9 |
+
app = Dash(__name__)
|
10 |
+
|
11 |
+
app.layout = html.Div(
|
12 |
+
[
|
13 |
+
html.H1("Sankey diagram"),
|
14 |
+
html.P("Data"),
|
15 |
+
dcc.Input(id="SM1", placeholder="SM1"),
|
16 |
+
dcc.Input(id="SM2", placeholder="SM2"),
|
17 |
+
dcc.Input(id="SM3", placeholder="SM3"),
|
18 |
+
dcc.Input(id="SM4", placeholder="SM4"),
|
19 |
+
dcc.Input(id="SM5", placeholder="SM5"),
|
20 |
+
dcc.Input(id="BS1", placeholder="BS1"),
|
21 |
+
dcc.Input(id="BS2", placeholder="BS2"),
|
22 |
+
dcc.Input(id="BS3", placeholder="BS3"),
|
23 |
+
dcc.Input(id="BS4", placeholder="BS4"),
|
24 |
+
dcc.Input(id="BS5", placeholder="BS5"),
|
25 |
+
dcc.Input(id="rainfall", placeholder="Rainfall"),
|
26 |
+
dcc.Input(id="et", placeholder="ET"),
|
27 |
+
html.Button("Calculate", id="submit"),
|
28 |
+
html.H4("Water conservation diagram"),
|
29 |
+
dcc.Graph(id="graph"),
|
30 |
+
html.P("Opacity"),
|
31 |
+
dcc.Slider(id="slider", min=0, max=1, value=0.5, step=0.1),
|
32 |
+
]
|
33 |
+
)
|
34 |
+
|
35 |
+
|
36 |
+
def get_excess_clipped_moist_ETb(moisture, bucket_size):
    """Split a layer's raw moisture into (ETb, clipped moisture, excess).

    Parameters
    ----------
    moisture : float
        Raw (unclipped) moisture for the layer; may be negative or exceed
        the bucket size.
    bucket_size : float
        Capacity of the layer's bucket.

    Returns
    -------
    tuple(float, float, float)
        ``(ETb, moist_out, excess)`` where ``excess`` is the overflow above
        ``bucket_size``, ``moist_out`` is the moisture clipped to
        ``[0, bucket_size]``, and ``ETb`` is the moisture deficit when the
        raw value was negative.
    """
    excess = 0 if moisture <= bucket_size else moisture - bucket_size
    moist_out = moisture if moisture <= bucket_size else bucket_size
    moist_out = moist_out if moisture >= 0 else 0
    ETb = 0 if moisture >= 0 else -moisture
    return ETb, moist_out, excess


def model(prev_state, R, ET, buckets):
    """Advance the five-layer bucket water model by one step.

    Parameters
    ----------
    prev_state : dict
        Previous moisture per layer, keys "SM1".."SM5".
    R : float
        Rainfall input.
    ET : float
        Evapotranspiration demand.
    buckets : dict
        Bucket sizes per layer, keys "BS1".."BS5".

    Returns
    -------
    list
        ``[SMs, ETbs, Excs, run_off]`` -- new moistures (incl. deep
        drainage "DD"), back-ET per layer, excess per layer, and run-off.
    """
    old = {x + "_prev": y for x, y in prev_state.items()}

    # layer 0 to 5 cm
    SM1 = 0.2 * old["SM1_prev"] + R - 0.5 * ET
    # BUG FIX: get_excess_clipped_moist_ETb returns (ETb, moisture, excess),
    # but the original unpacked (Exc, SM, ETb), swapping ETb and excess at
    # every layer. Unpack in the order the helper actually returns.
    ETb1, SM1, Exc1 = get_excess_clipped_moist_ETb(SM1, buckets["BS1"])

    # layer 5 to 15 cm
    SM2 = 0.95 * old["SM2_prev"] + 0.8 * old["SM1_prev"] - 0.2 * ETb1 + Exc1
    ETb2, SM2, Exc2 = get_excess_clipped_moist_ETb(SM2, buckets["BS2"])

    # Run Off
    # NOTE(review): SM1/SM2 are already clipped to their buckets above, so
    # this condition can never be true, and run_off is assigned *after*
    # Exc2 is zeroed, so it is always 0 -- the two statements look swapped.
    # Left as-is pending confirmation of the intended run-off rule.
    if (SM1 > buckets["BS1"]) * (SM2 > buckets["BS2"]):
        Exc2 = 0
        run_off = Exc2
    else:
        run_off = 0

    # layer 15 to 30 cm
    SM3 = 0.95 * old["SM3_prev"] + 0.05 * old["SM2_prev"] - 0.15 * ETb2 + Exc2
    ETb3, SM3, Exc3 = get_excess_clipped_moist_ETb(SM3, buckets["BS3"])

    # layer 30 to 60 cm
    SM4 = 0.95 * old["SM4_prev"] + 0.05 * old["SM3_prev"] - 0.1 * ETb3 + Exc3
    ETb4, SM4, Exc4 = get_excess_clipped_moist_ETb(SM4, buckets["BS4"])

    # layer 60 to 100 cm
    SM5 = 0.99 * old["SM5_prev"] + 0.01 * old["SM4_prev"] - 0.05 * ETb4 + Exc4
    ETb5, SM5, Exc5 = get_excess_clipped_moist_ETb(SM5, buckets["BS5"])

    # Deep drainage: slow leak from the bottom layer plus its overflow.
    DD = 0.01 * old["SM5_prev"] + Exc5

    return [
        {
            "SM1": SM1,
            "SM2": SM2,
            "SM3": SM3,
            "SM4": SM4,
            "SM5": SM5,
            "DD": DD,
        },
        {
            "ETb1": ETb1,
            "ETb2": ETb2,
            "ETb3": ETb3,
            "ETb4": ETb4,
            "ETb5": ETb5,
        },
        {
            "Exc1": Exc1,
            "Exc2": Exc2,
            "Exc3": Exc3,
            "Exc4": Exc4,
            "Exc5": Exc5,
        },
        run_off,
    ]
|
101 |
+
|
102 |
+
|
103 |
+
@app.callback(
    Output("graph", "figure"),
    Input("slider", "value"),
    Input("SM1", "value"),
    Input("SM2", "value"),
    Input("SM3", "value"),
    Input("SM4", "value"),
    Input("SM5", "value"),
    Input("BS1", "value"),
    Input("BS2", "value"),
    Input("BS3", "value"),
    Input("BS4", "value"),
    Input("BS5", "value"),
    Input("rainfall", "value"),
    Input("et", "value"),
    Input("submit", "n_clicks"),
)
def display_sankey(
    opacity, SM1, SM2, SM3, SM4, SM5, BS1, BS2, BS3, BS4, BS5, R, ET, submit
):
    """Run the bucket water model and render a Sankey diagram of the flows.

    Parameters mirror the dcc.Input components: previous soil moistures
    SM1..SM5, bucket sizes BS1..BS5, rainfall R, evapotranspiration ET,
    plus the opacity slider value and the submit button click count.

    Returns
    -------
    plotly.graph_objects.Figure
        The Sankey figure (flows are not yet wired up -- see NOTE below).
    """
    if submit == 0 or submit is None:
        raise PreventUpdate

    raw_values = [SM1, SM2, SM3, SM4, SM5, BS1, BS2, BS3, BS4, BS5, R, ET]
    # Wait until every field is filled before running the model.
    if any(v is None or v == "" for v in raw_values):
        raise PreventUpdate
    # BUG FIX: text dcc.Inputs deliver their values as strings; the water
    # model performs float arithmetic, so coerce everything first.
    SM1, SM2, SM3, SM4, SM5, BS1, BS2, BS3, BS4, BS5, R, ET = (
        float(v) for v in raw_values
    )

    buckets = {
        "BS1": BS1,
        "BS2": BS2,
        "BS3": BS3,
        "BS4": BS4,
        "BS5": BS5,
    }
    prev_state = {
        "SM1": SM1,
        "SM2": SM2,
        "SM3": SM3,
        "SM4": SM4,
        "SM5": SM5,
    }
    SMs, ETbs, Excs, run_off = model(prev_state, R, ET, buckets)

    node = dict(
        pad=15,
        thickness=20,
        line=dict(color="black", width=0.5),
        label=[
            "SM1_prev",
            "SM2_prev",
            "SM3_prev",
            "SM4_prev",
            "SM5_prev",  # 0-4
            "SM1",
            "SM2",
            "SM3",
            "SM4",
            "SM5",  # 5-9
            "ETb1",
            "ETb2",
            "ETb3",
            "ETb4",
            "ETb5",  # 10-14
            "Exc1",
            "Exc2",
            "Exc3",
            "Exc4",
            "Exc5",  # 15-19
            "R",
            "ET",
            "DD",
            "runoff",
        ],  # 20, 21, 22, 23
        color="blue",
    )

    link = dict(
        source=[0, 0, 1, 1, 2, 2, 3, 3, 4, 4],  # indices correspond to labels
        target=[5, 6, 6, 7, 7, 8, 8, 9, 9, 22],
        # NOTE(review): values are still empty, so no flows are drawn and the
        # model outputs (SMs/ETbs/Excs/run_off) and `opacity` are unused --
        # this diagram is work in progress.
        value=[],
    )

    fig = go.Figure(go.Sankey(link=link, node=node))
    fig.update_layout(font_size=10)
    return fig
|
183 |
+
|
184 |
+
|
185 |
+
if __name__ == "__main__":
    # BUG FIX: previously the dev server started unconditionally at import
    # time, which breaks importing this module (e.g. for tests or a WSGI
    # deployment). Guard it behind the standard entry-point check.
    app.run_server(debug=True)
|
scripts/simple_html.py
ADDED
@@ -0,0 +1,391 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/usr/bin/env python
|
2 |
+
# -*- coding: utf-8 -*-
|
3 |
+
import argparse
|
4 |
+
import os
|
5 |
+
|
6 |
+
import numpy as np
|
7 |
+
import plotly.express as px
|
8 |
+
import plotly.graph_objs as go
|
9 |
+
import plotly.io as pio
|
10 |
+
import rasterio
|
11 |
+
from matplotlib.colors import LightSource, LinearSegmentedColormap
|
12 |
+
|
13 |
+
|
14 |
+
def main(f_path, f_name, farm_name):
|
15 |
+
"""Main function to visualise the soil moisture content for a given layer,
|
16 |
+
year file, and days.
|
17 |
+
|
18 |
+
Parameters
|
19 |
+
----------
|
20 |
+
f_path : str
|
21 |
+
Path to the file.
|
22 |
+
f_name : str
|
23 |
+
Name of the file.
|
24 |
+
farm_name : str
|
25 |
+
Name of the farm.
|
26 |
+
|
27 |
+
Returns
|
28 |
+
-------
|
29 |
+
None
|
30 |
+
"""
|
31 |
+
|
32 |
+
# f_path = ""#"/home/sahand/Projects/PIPE-3788 GRDC SoilWaterNow Deployment/work/v3/Arawa 2019-2023/c8/1af25ced023e58c46f4403a155210d/soilwatermodel v3/analysis"
|
33 |
+
# f_name = ""#"delta-2022-12-20_2023-01-10-SM2-w_mean-h_mean.tif"
|
34 |
+
file = os.path.join(f_path, f_name)
|
35 |
+
# farm_name = "Arawa"
|
36 |
+
# layer = "SM2"
|
37 |
+
|
38 |
+
with rasterio.open(file) as src:
|
39 |
+
band1 = src.read(1)
|
40 |
+
print("Band1 has shape", band1.shape)
|
41 |
+
height = band1.shape[0]
|
42 |
+
width = band1.shape[1]
|
43 |
+
cols, rows = np.meshgrid(np.arange(width), np.arange(height))
|
44 |
+
xs, ys = rasterio.transform.xy(src.transform, rows, cols)
|
45 |
+
lons = np.array(xs)
|
46 |
+
lats = np.array(ys)
|
47 |
+
|
48 |
+
bamako = [
|
49 |
+
[0.0, "rgb(0, 63, 76)"], # a scientific colorscale for dem data
|
50 |
+
[0.1, "rgb(29, 81, 59)"],
|
51 |
+
[0.2, "rgb(55, 98, 43)"],
|
52 |
+
[0.3, "rgb(79, 114, 30)"],
|
53 |
+
[0.4, "rgb(103, 129, 16)"],
|
54 |
+
[0.5, "rgb(136, 142, 2)"],
|
55 |
+
[0.6, "rgb(169, 154, 21)"],
|
56 |
+
[0.7, "rgb(192, 171, 45)"],
|
57 |
+
[0.8, "rgb(214, 188, 74)"],
|
58 |
+
[0.9, "rgb(234, 209, 112)"],
|
59 |
+
[1.0, "rgb(254, 229, 152)"],
|
60 |
+
]
|
61 |
+
|
62 |
+
# fig= px.imshow(band1, color_continuous_scale=bamako)
|
63 |
+
# my_layout= dict(title_text=f"{farm_name}--{f_name.split('.')[0]}", title_x=0.5, width =700, height=500, template='none',
|
64 |
+
# coloraxis_colorbar=dict(len=0.75, thickness=25))
|
65 |
+
# fig.update_layout(**my_layout)
|
66 |
+
|
67 |
+
ve = 1
|
68 |
+
ls = LightSource(azdeg=315, altdeg=45)
|
69 |
+
hilsh = ls.hillshade(band1, vert_exag=ve) # , dx=dx, dy=dy)
|
70 |
+
fig = px.imshow(hilsh, color_continuous_scale=bamako)
|
71 |
+
|
72 |
+
# fig.update_traces(customdata=elevation, hovertemplate='moisture: %{customdata}<extra></extra>')
|
73 |
+
# fig.update_layout(**my_layout)
|
74 |
+
# fig.show()#
|
75 |
+
|
76 |
+
cm_data = [
|
77 |
+
[0.0011753, 0.25004, 0.3],
|
78 |
+
[0.0039003, 0.25157, 0.29861],
|
79 |
+
[0.006602, 0.25305, 0.29722],
|
80 |
+
[0.0092914, 0.25456, 0.29581],
|
81 |
+
[0.012175, 0.25604, 0.2944],
|
82 |
+
[0.014801, 0.25755, 0.293],
|
83 |
+
[0.01745, 0.25907, 0.29161],
|
84 |
+
[0.020096, 0.26057, 0.29018],
|
85 |
+
[0.022743, 0.26208, 0.28878],
|
86 |
+
[0.025398, 0.26361, 0.28735],
|
87 |
+
[0.028064, 0.26511, 0.28592],
|
88 |
+
[0.030746, 0.26664, 0.2845],
|
89 |
+
[0.033437, 0.26816, 0.28309],
|
90 |
+
[0.036369, 0.26971, 0.28165],
|
91 |
+
[0.039136, 0.27124, 0.28021],
|
92 |
+
[0.041941, 0.27278, 0.27878],
|
93 |
+
[0.044597, 0.27432, 0.27734],
|
94 |
+
[0.047216, 0.27589, 0.27591],
|
95 |
+
[0.049816, 0.27743, 0.27443],
|
96 |
+
[0.052305, 0.279, 0.27298],
|
97 |
+
[0.054846, 0.28053, 0.27151],
|
98 |
+
[0.057331, 0.28213, 0.27007],
|
99 |
+
[0.059903, 0.28369, 0.2686],
|
100 |
+
[0.062324, 0.28526, 0.2671],
|
101 |
+
[0.064737, 0.28684, 0.26564],
|
102 |
+
[0.067177, 0.28844, 0.26416],
|
103 |
+
[0.069647, 0.29003, 0.26267],
|
104 |
+
[0.072007, 0.29165, 0.26118],
|
105 |
+
[0.074435, 0.29324, 0.25969],
|
106 |
+
[0.076803, 0.29484, 0.25817],
|
107 |
+
[0.079232, 0.29647, 0.25667],
|
108 |
+
[0.081738, 0.29808, 0.25518],
|
109 |
+
[0.084163, 0.29972, 0.25365],
|
110 |
+
[0.086556, 0.30134, 0.25213],
|
111 |
+
[0.089008, 0.30298, 0.25059],
|
112 |
+
[0.091487, 0.30463, 0.24907],
|
113 |
+
[0.093906, 0.3063, 0.24754],
|
114 |
+
[0.096327, 0.30793, 0.24598],
|
115 |
+
[0.098845, 0.30961, 0.24443],
|
116 |
+
[0.10132, 0.31127, 0.24288],
|
117 |
+
[0.10384, 0.31294, 0.24134],
|
118 |
+
[0.10637, 0.31461, 0.23978],
|
119 |
+
[0.10884, 0.31632, 0.23821],
|
120 |
+
[0.1114, 0.318, 0.23666],
|
121 |
+
[0.11391, 0.3197, 0.23509],
|
122 |
+
[0.11648, 0.32139, 0.23347],
|
123 |
+
[0.11904, 0.32311, 0.23191],
|
124 |
+
[0.12156, 0.32481, 0.2303],
|
125 |
+
[0.12416, 0.32653, 0.22873],
|
126 |
+
[0.12676, 0.32826, 0.22712],
|
127 |
+
[0.12938, 0.33, 0.22551],
|
128 |
+
[0.132, 0.33173, 0.22389],
|
129 |
+
[0.13465, 0.33348, 0.22228],
|
130 |
+
[0.13728, 0.33524, 0.22068],
|
131 |
+
[0.13988, 0.33699, 0.21904],
|
132 |
+
[0.14258, 0.33874, 0.21741],
|
133 |
+
[0.14528, 0.34051, 0.21578],
|
134 |
+
[0.14793, 0.34229, 0.21413],
|
135 |
+
[0.15062, 0.34406, 0.21248],
|
136 |
+
[0.15331, 0.34584, 0.21083],
|
137 |
+
[0.15607, 0.34765, 0.20917],
|
138 |
+
[0.15879, 0.34944, 0.20751],
|
139 |
+
[0.16154, 0.35125, 0.20585],
|
140 |
+
[0.16429, 0.35306, 0.20419],
|
141 |
+
[0.16705, 0.35489, 0.20248],
|
142 |
+
[0.16983, 0.35671, 0.20078],
|
143 |
+
[0.17259, 0.35853, 0.1991],
|
144 |
+
[0.1754, 0.36036, 0.19742],
|
145 |
+
[0.17824, 0.36222, 0.19572],
|
146 |
+
[0.18102, 0.36406, 0.19403],
|
147 |
+
[0.18387, 0.36593, 0.1923],
|
148 |
+
[0.18672, 0.36779, 0.19058],
|
149 |
+
[0.18959, 0.36966, 0.18887],
|
150 |
+
[0.19246, 0.37154, 0.18714],
|
151 |
+
[0.19535, 0.37344, 0.18542],
|
152 |
+
[0.19825, 0.37533, 0.18364],
|
153 |
+
[0.20116, 0.37724, 0.18189],
|
154 |
+
[0.20413, 0.37915, 0.18014],
|
155 |
+
[0.20706, 0.38108, 0.17841],
|
156 |
+
[0.21001, 0.383, 0.1766],
|
157 |
+
[0.21297, 0.38493, 0.17486],
|
158 |
+
[0.21599, 0.38688, 0.17306],
|
159 |
+
[0.21898, 0.38884, 0.17127],
|
160 |
+
[0.22199, 0.39081, 0.1695],
|
161 |
+
[0.22501, 0.39277, 0.16768],
|
162 |
+
[0.22809, 0.39476, 0.16585],
|
163 |
+
[0.23115, 0.39674, 0.16407],
|
164 |
+
[0.23423, 0.39875, 0.16224],
|
165 |
+
[0.23735, 0.40078, 0.16037],
|
166 |
+
[0.24043, 0.40279, 0.15857],
|
167 |
+
[0.24357, 0.40482, 0.15671],
|
168 |
+
[0.24673, 0.40687, 0.15484],
|
169 |
+
[0.24989, 0.40893, 0.15296],
|
170 |
+
[0.2531, 0.411, 0.15109],
|
171 |
+
[0.25629, 0.41308, 0.14922],
|
172 |
+
[0.25954, 0.41517, 0.14733],
|
173 |
+
[0.26278, 0.41728, 0.14546],
|
174 |
+
[0.26606, 0.41939, 0.14352],
|
175 |
+
[0.26935, 0.42151, 0.14161],
|
176 |
+
[0.27265, 0.42366, 0.13963],
|
177 |
+
[0.276, 0.42581, 0.13775],
|
178 |
+
[0.27935, 0.42799, 0.13575],
|
179 |
+
[0.28274, 0.43017, 0.13382],
|
180 |
+
[0.28611, 0.43238, 0.13185],
|
181 |
+
[0.28956, 0.43459, 0.12989],
|
182 |
+
[0.293, 0.43682, 0.12788],
|
183 |
+
[0.29648, 0.43906, 0.12588],
|
184 |
+
[0.29998, 0.44132, 0.12382],
|
185 |
+
[0.3035, 0.44361, 0.12179],
|
186 |
+
[0.30706, 0.44589, 0.11977],
|
187 |
+
[0.31066, 0.44821, 0.11773],
|
188 |
+
[0.31426, 0.45054, 0.11568],
|
189 |
+
[0.3179, 0.45289, 0.11362],
|
190 |
+
[0.32156, 0.45525, 0.11153],
|
191 |
+
[0.32526, 0.45763, 0.10944],
|
192 |
+
[0.32901, 0.46003, 0.10731],
|
193 |
+
[0.33277, 0.46245, 0.10513],
|
194 |
+
[0.33654, 0.4649, 0.10304],
|
195 |
+
[0.34037, 0.46736, 0.10085],
|
196 |
+
[0.34422, 0.46984, 0.098693],
|
197 |
+
[0.34812, 0.47233, 0.096453],
|
198 |
+
[0.35204, 0.47486, 0.094325],
|
199 |
+
[0.35601, 0.47738, 0.092102],
|
200 |
+
[0.36, 0.47994, 0.089849],
|
201 |
+
[0.36403, 0.48251, 0.087617],
|
202 |
+
[0.36811, 0.4851, 0.085276],
|
203 |
+
[0.37221, 0.48772, 0.082993],
|
204 |
+
[0.37638, 0.49034, 0.080686],
|
205 |
+
[0.38057, 0.49296, 0.078304],
|
206 |
+
[0.3848, 0.49562, 0.075955],
|
207 |
+
[0.38909, 0.49829, 0.073656],
|
208 |
+
[0.39341, 0.50095, 0.071224],
|
209 |
+
[0.39779, 0.50362, 0.068802],
|
210 |
+
[0.4022, 0.5063, 0.066264],
|
211 |
+
[0.40668, 0.50898, 0.063774],
|
212 |
+
[0.4112, 0.51164, 0.061172],
|
213 |
+
[0.41578, 0.51431, 0.058651],
|
214 |
+
[0.4204, 0.51695, 0.05607],
|
215 |
+
[0.42509, 0.51956, 0.053359],
|
216 |
+
[0.42981, 0.52214, 0.050712],
|
217 |
+
[0.4346, 0.5247, 0.047969],
|
218 |
+
[0.43943, 0.52722, 0.045272],
|
219 |
+
[0.44432, 0.52967, 0.042483],
|
220 |
+
[0.44924, 0.53207, 0.039697],
|
221 |
+
[0.45421, 0.5344, 0.036906],
|
222 |
+
[0.45922, 0.53666, 0.034001],
|
223 |
+
[0.46428, 0.53884, 0.031435],
|
224 |
+
[0.46935, 0.54092, 0.028957],
|
225 |
+
[0.47444, 0.54291, 0.026597],
|
226 |
+
[0.47956, 0.5448, 0.024363],
|
227 |
+
[0.48468, 0.54659, 0.022265],
|
228 |
+
[0.48982, 0.54827, 0.020312],
|
229 |
+
[0.49497, 0.54985, 0.018512],
|
230 |
+
[0.5001, 0.55132, 0.016876],
|
231 |
+
[0.50524, 0.55269, 0.015412],
|
232 |
+
[0.51038, 0.55397, 0.014132],
|
233 |
+
[0.5155, 0.55517, 0.013033],
|
234 |
+
[0.52063, 0.55628, 0.01218],
|
235 |
+
[0.52575, 0.55735, 0.011416],
|
236 |
+
[0.53088, 0.55835, 0.010839],
|
237 |
+
[0.53601, 0.55933, 0.010585],
|
238 |
+
[0.54115, 0.5603, 0.010612],
|
239 |
+
[0.54633, 0.56126, 0.010945],
|
240 |
+
[0.55153, 0.56225, 0.011649],
|
241 |
+
[0.55677, 0.56328, 0.012516],
|
242 |
+
[0.56207, 0.56438, 0.01365],
|
243 |
+
[0.56742, 0.56555, 0.015118],
|
244 |
+
[0.57285, 0.56684, 0.016927],
|
245 |
+
[0.57835, 0.56824, 0.019098],
|
246 |
+
[0.58394, 0.56978, 0.021653],
|
247 |
+
[0.58963, 0.57149, 0.024617],
|
248 |
+
[0.59541, 0.57336, 0.028014],
|
249 |
+
[0.60129, 0.5754, 0.03187],
|
250 |
+
[0.60728, 0.57763, 0.036408],
|
251 |
+
[0.61335, 0.58006, 0.041287],
|
252 |
+
[0.61954, 0.58268, 0.046299],
|
253 |
+
[0.62581, 0.58551, 0.051565],
|
254 |
+
[0.63218, 0.58853, 0.057023],
|
255 |
+
[0.63862, 0.59176, 0.062625],
|
256 |
+
[0.64514, 0.59517, 0.068288],
|
257 |
+
[0.65173, 0.59876, 0.074092],
|
258 |
+
[0.65837, 0.60254, 0.079969],
|
259 |
+
[0.66506, 0.60647, 0.085984],
|
260 |
+
[0.6718, 0.61057, 0.0922],
|
261 |
+
[0.67856, 0.6148, 0.098456],
|
262 |
+
[0.68534, 0.61918, 0.10476],
|
263 |
+
[0.69213, 0.62366, 0.11129],
|
264 |
+
[0.69891, 0.62825, 0.11783],
|
265 |
+
[0.70569, 0.63293, 0.12447],
|
266 |
+
[0.71245, 0.63769, 0.13125],
|
267 |
+
[0.71918, 0.64252, 0.1381],
|
268 |
+
[0.72588, 0.6474, 0.14503],
|
269 |
+
[0.73253, 0.65232, 0.15201],
|
270 |
+
[0.73912, 0.65728, 0.15912],
|
271 |
+
[0.74564, 0.66226, 0.16629],
|
272 |
+
[0.7521, 0.66724, 0.17352],
|
273 |
+
[0.75847, 0.67223, 0.18082],
|
274 |
+
[0.76475, 0.67721, 0.18823],
|
275 |
+
[0.77094, 0.68215, 0.19562],
|
276 |
+
[0.77702, 0.68709, 0.20308],
|
277 |
+
[0.78299, 0.69199, 0.21059],
|
278 |
+
[0.78886, 0.69684, 0.21813],
|
279 |
+
[0.79459, 0.70164, 0.2257],
|
280 |
+
[0.80021, 0.7064, 0.23325],
|
281 |
+
[0.8057, 0.7111, 0.24084],
|
282 |
+
[0.81106, 0.71574, 0.24845],
|
283 |
+
[0.81631, 0.72031, 0.25601],
|
284 |
+
[0.82142, 0.72482, 0.26361],
|
285 |
+
[0.8264, 0.72926, 0.27115],
|
286 |
+
[0.83127, 0.73364, 0.2787],
|
287 |
+
[0.83602, 0.73795, 0.28619],
|
288 |
+
[0.84066, 0.74221, 0.29369],
|
289 |
+
[0.84519, 0.74639, 0.30114],
|
290 |
+
[0.84961, 0.75052, 0.30859],
|
291 |
+
[0.85394, 0.75459, 0.31598],
|
292 |
+
[0.85818, 0.7586, 0.32333],
|
293 |
+
[0.86234, 0.76255, 0.33065],
|
294 |
+
[0.86641, 0.76646, 0.33794],
|
295 |
+
[0.87042, 0.77032, 0.3452],
|
296 |
+
[0.87436, 0.77414, 0.35241],
|
297 |
+
[0.87824, 0.77792, 0.3596],
|
298 |
+
[0.88207, 0.78167, 0.36676],
|
299 |
+
[0.88585, 0.78537, 0.37388],
|
300 |
+
[0.88958, 0.78905, 0.38098],
|
301 |
+
[0.89328, 0.7927, 0.38804],
|
302 |
+
[0.89694, 0.79632, 0.39508],
|
303 |
+
[0.90057, 0.79992, 0.40209],
|
304 |
+
[0.90417, 0.8035, 0.40909],
|
305 |
+
[0.90775, 0.80706, 0.41606],
|
306 |
+
[0.9113, 0.8106, 0.423],
|
307 |
+
[0.91484, 0.81412, 0.42995],
|
308 |
+
[0.91835, 0.81764, 0.43686],
|
309 |
+
[0.92186, 0.82114, 0.44376],
|
310 |
+
[0.92534, 0.82462, 0.45064],
|
311 |
+
[0.92882, 0.8281, 0.45751],
|
312 |
+
[0.93228, 0.83157, 0.46438],
|
313 |
+
[0.93573, 0.83502, 0.47121],
|
314 |
+
[0.93918, 0.83847, 0.47804],
|
315 |
+
[0.94261, 0.84192, 0.48486],
|
316 |
+
[0.94603, 0.84535, 0.49169],
|
317 |
+
[0.94945, 0.84878, 0.49849],
|
318 |
+
[0.95286, 0.85221, 0.50528],
|
319 |
+
[0.95627, 0.85563, 0.51207],
|
320 |
+
[0.95966, 0.85904, 0.51886],
|
321 |
+
[0.96305, 0.86246, 0.52563],
|
322 |
+
[0.96643, 0.86586, 0.53241],
|
323 |
+
[0.96981, 0.86927, 0.53917],
|
324 |
+
[0.97318, 0.87268, 0.54594],
|
325 |
+
[0.97654, 0.87608, 0.55269],
|
326 |
+
[0.9799, 0.87948, 0.55945],
|
327 |
+
[0.98325, 0.88288, 0.5662],
|
328 |
+
[0.98659, 0.88628, 0.57296],
|
329 |
+
[0.98993, 0.88968, 0.57971],
|
330 |
+
[0.99326, 0.89308, 0.58645],
|
331 |
+
[0.99658, 0.89648, 0.5932],
|
332 |
+
[0.9999, 0.89988, 0.59995],
|
333 |
+
]
|
334 |
+
|
335 |
+
bamako_map = LinearSegmentedColormap.from_list("bamako", cm_data)
|
336 |
+
rgb = ls.shade(
|
337 |
+
band1, cmap=bamako_map, blend_mode="overlay", vert_exag=ve
|
338 |
+
) # , dx=dx, dy=dy)
|
339 |
+
img = np.array((255 * rgb[:, :, :3]), int)
|
340 |
+
|
341 |
+
fig = go.Figure(
|
342 |
+
go.Image(
|
343 |
+
z=img,
|
344 |
+
colormodel="rgb",
|
345 |
+
customdata=np.stack((band1, lats, lons), axis=-1),
|
346 |
+
hovertemplate="<b>SM</b>: %{customdata[0]}<br>"
|
347 |
+
+ "<b>Lat</b>: %{customdata[1]}<br>"
|
348 |
+
+ "<b>Lon</b>: %{customdata[2]}<br>"
|
349 |
+
+ "<extra></extra>",
|
350 |
+
)
|
351 |
+
)
|
352 |
+
fig.update_layout(
|
353 |
+
title_text=f"{farm_name}--{f_name.split('.')[0]}",
|
354 |
+
title_x=0.5,
|
355 |
+
width=700,
|
356 |
+
height=500,
|
357 |
+
template="none",
|
358 |
+
yaxis_autorange="reversed",
|
359 |
+
)
|
360 |
+
# fig.show()
|
361 |
+
pio.write_html(fig, file=f"{file.replace('.tif','')}plot.html", auto_open=True)
|
362 |
+
|
363 |
+
|
364 |
+
if __name__ == "__main__":
    # Command-line entry point: visualise a soil-moisture GeoTIFF as an
    # interactive HTML plot.
    parser = argparse.ArgumentParser(
        description="Visualise a soil moisture GeoTIFF raster as an interactive HTML plot.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "-f",
        "--file",
        help="file name",
    )
    parser.add_argument(
        "-d",
        "--dir",
        help="directory name",
    )
    parser.add_argument(
        "-n",
        "--name",
        help="farm name",
    )

    args = parser.parse_args()
    f_name = args.file
    d_name = args.dir
    farm_name = args.name

    # Bug fix: main's signature is (f_path, f_name, farm_name), so the
    # directory goes first and the file name second.  The previous call
    # (main(f_name, d_name, ...)) swapped them, producing a wrong path in
    # os.path.join.
    main(d_name, f_name, farm_name)
|
secrets/README.md
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
Store private keys and other secrets in this directory.
|
services/__init__.py
ADDED
File without changes
|
services/job_management.py
ADDED
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
# from google.cloud import datastore
|
3 |
+
|
4 |
+
# from google.api_core import core
|
5 |
+
from google.cloud import shell_v1 # datastore
|
6 |
+
from google.cloud.shell_v1.services.cloud_shell_service import ( # CloudShellServiceAsyncClient,; transports,
|
7 |
+
CloudShellServiceClient,
|
8 |
+
)
|
9 |
+
from google.oauth2 import service_account
|
10 |
+
|
11 |
+
# Path to the GCP service-account key used to authenticate the clients below.
# NOTE(review): runs at import time; the key file must exist under secrets/
# (which is git-ignored per secrets/README.md) — confirm deployment provides it.
service_account_secret = "secrets/nowcasting-390306-e80acb3285b4.json"
credentials = service_account.Credentials.from_service_account_file(
    service_account_secret
)
# Debug print of the loaded credentials object (WIP draft script).
print(credentials)
# client = datastore.Client()
# Module-level Cloud Shell client shared by the statements at the bottom of the file.
client = CloudShellServiceClient(credentials=credentials)
|
18 |
+
|
19 |
+
|
20 |
+
async def sample_authorize_environment(credentials=credentials):
    """Authorize the caller's Cloud Shell environment via the async client.

    NOTE(review): ``credentials=credentials`` binds the module-level
    credentials object at definition time.
    """
    # Create a client
    client = shell_v1.CloudShellServiceAsyncClient(credentials=credentials)

    # Initialize request argument(s)
    request = shell_v1.AuthorizeEnvironmentRequest()

    # Make the request
    operation = client.authorize_environment(request=request)

    print("Waiting for operation to complete...")

    # NOTE(review): awaits the RPC, then calls the *blocking* result() on the
    # returned long-running operation — for the async client this may need
    # an awaited result instead; confirm against the google-cloud-shell
    # async API before relying on it.
    response = (await operation).result()

    # Handle the response
    print(response)
|
36 |
+
|
37 |
+
|
38 |
+
def sample_authorize_environment_sync(credentials=credentials):
    """Authorize the caller's Cloud Shell environment with the blocking
    client and print the operation's result.
    """
    shell_client = shell_v1.CloudShellServiceClient(credentials=credentials)
    # Default-constructed request, as in the async sample.
    auth_request = shell_v1.AuthorizeEnvironmentRequest()
    # Start the long-running operation and block until it completes.
    pending_op = shell_client.authorize_environment(request=auth_request)
    print(pending_op.result())
|
49 |
+
|
50 |
+
import asyncio  # needed to actually run the async sample below

# NOTE(review): this prints the bound helper method object itself (it is not
# called); common_billing_account_path takes a billing_account argument —
# confirm what was intended here.
print(client.common_billing_account_path)


# Bug fix: sample_authorize_environment is a coroutine function, so the
# previous bare call only created a coroutine that never executed
# ("coroutine was never awaited").  asyncio.run() drives it to completion.
asyncio.run(sample_authorize_environment(credentials))
sample_authorize_environment_sync(credentials)
# operation = my_api_client.long_running_method()
# result = operation.result()


# from_service_account_file is a classmethod, so call it on the class
# directly instead of constructing (and discarding) a default-credentials
# client first.
service_account_secret = "secrets/nowcasting-390306-e80acb3285b4.json"
client = shell_v1.CloudShellServiceClient.from_service_account_file(
    service_account_secret
)
# TODO(review): get_environment() is called with no arguments here; the API
# expects a request/environment name, so this call will raise — left in place
# as in the draft, but decide which environment should be fetched.
client.get_environment()
request = shell_v1.AuthorizeEnvironmentRequest()
operation = client.authorize_environment(request=request)
response = operation.result()
print(response)
|
setup.cfg
ADDED
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[metadata]
|
2 |
+
name = nowcasting_app_sahandv
|
3 |
+
version = 0.0.3
|
4 |
+
author = Sahand Vahidnia
|
5 |
+
author_email = [email protected]
|
6 |
+
description = Nowcasting software to detect soil water capacity
|
7 |
+
long_description = file: README.md
|
8 |
+
long_description_content_type = text/markdown
|
9 |
+
url = https://github.sydney.edu.au/informatics/PIPE-3788-GRDC-SoilWaterNow-Deployment-App
|
10 |
+
project_urls =
|
11 |
+
Bug Tracker = https://github.sydney.edu.au/informatics/PIPE-3788-GRDC-SoilWaterNow-Deployment-App/issues
|
12 |
+
classifiers =
|
13 |
+
Programming Language :: Python :: 3
|
14 |
+
Operating System :: OS Independent
|
15 |
+
|
16 |
+
[flake8]
|
17 |
+
ignore = E501, W503, E203, E265
|
18 |
+
|
19 |
+
[isort]
|
20 |
+
profile = black
|
templates/admin.html
ADDED
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{# Standalone shell for Nowcasting admin pages.  Child templates fill the
   `meta` and `content` blocks.  Unlike base.html, the nav links back to
   Home only — presumably so admin pages hide the app navigation. #}
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    {% block meta %} {% endblock %}
    <title>Nowcasting Admin</title>
    <style>
        nav a {
            color: #343434;
            font-size: 1em;
            margin-left: 50px;
            text-decoration: none;
        }
        nav a:hover{
            color: #7a7a7a;
        }
    </style>
</head>
<body>
    <nav>
        <a href="{{ url_for('home') }}">Home</a>
    </nav>
    <hr>
    <div class="content">
        {% block content %} {% endblock %}
    </div>
</body>
</html>
|
templates/base.html
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{# Base layout for the Nowcasting Flask app.  Child templates fill the
   `meta`, `title` and `content` blocks.
   NOTE(review): url_for('/dashapp/') passes a path-looking endpoint name;
   this only resolves if the embedded Dash app registered an endpoint
   literally named '/dashapp/'.  'home', 'dashboard' and 'addjob' view
   functions must also exist — confirm against the Flask routes. #}
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    {% block meta %} {% endblock %}
    <title>{% block title %} {% endblock %}</title>
    <style>
        nav a {
            color: #343434;
            font-size: 1em;
            margin-left: 50px;
            text-decoration: none;
        }
        nav a:hover{
            color: #7a7a7a;
        }
    </style>
</head>
<body>
    <nav>
        <a href="{{ url_for('home') }}">Home</a>
        <a href="{{ url_for('/dashapp/') }}">Dashapp Analyser</a>
        <a href="{{ url_for('dashboard') }}">Dashboard</a>
        <a href="{{ url_for('addjob') }}">Add Job</a>
    </nav>
    <hr>
    <div class="content">
        {% block content %} {% endblock %}
    </div>
</body>
</html>
|
templates/dash_app.html
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<!-- Custom index template for the embedded Dash app.
     The {%metas%}, {%title%}, {%favicon%}, {%css%}, {%app_entry%},
     {%config%}, {%scripts%} and {%renderer%} markers appear to be Dash
     index_string interpolation keys (filled in by Dash at render time),
     not Jinja blocks - do not render this file through Jinja. -->
<!DOCTYPE html>
<html>
<head>
    {%metas%}
    <title>{%title%}</title>
    {%favicon%}
    {%css%}
</head>
<body>
    <div class="col-12">
        <br>
        <h2>Soil Moisture Comparison Tool</h2>
        <br>
        <hr>
    </div>
    {%app_entry%}
    <footer>
        {%config%}
        {%scripts%}
        {%renderer%}
    </footer>
    <div class="col-12">
        <hr>
        <br>
        Copyright @ 2023 Sydney Informatics Hub (SIH)
        <br>
    </div>
</body>
</html>
|
templates/index.html
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{# Landing page: extends base.html and fills its title/content blocks. #}
{% extends 'base.html' %}

{% block content %}
<h1>{% block title %} Home page of the Nowcasting app {% endblock %}</h1>
<h1>Hello World!</h1>
<h2>Welcome to the FlaskApp!</h2>
{% endblock %}
|
templates/job_add.html
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{# WIP form for adding a job via POST /jobadd.
   NOTE(review): the submit handler below calls preventDefault() and only
   alerts the serialized form data, so the form is never actually posted —
   presumably a placeholder; confirm before wiring it to the API. #}
<html>
<head>
    <title>Flask RESTful API Form</title>
</head>
<body>

<form action="/jobadd" method="post">
    <input type="text" name="name">
    <input type="text" name="age">
    <input type="submit" value="Submit">
</form>

<script>
    // Collect every named <input> into a {name: value} object and
    // return it as a JSON string.
    function generateJSON() {
        var formData = {};
        var formElements = document.querySelectorAll("input[name]");
        for (var i = 0; i < formElements.length; i++) {
            var formElement = formElements[i];
            var name = formElement.name;
            var value = formElement.value;
            formData[name] = value;
        }
        return JSON.stringify(formData);
    }

    // Intercept submission: show the JSON instead of posting (see note at top).
    document.querySelector("form").addEventListener("submit", function(event) {
        event.preventDefault();
        var jsonData = generateJSON();
        alert(jsonData);
    });
</script>

</body>
</html>
|
webservice.py
ADDED
File without changes
|