nastasiasnk committed
Commit a029712
1 Parent(s): 3cc6a9a

Update app.py
Files changed (1):
  1. app.py +15 -69
app.py CHANGED
@@ -8,17 +8,11 @@ import os
 
 # ---------------------- Accessing data from Notion ---------------------- #
 
-
 from notion_client import Client as client_notion
 from imports_utils import fetch_all_database_pages
 from imports_utils import get_property_value
-#from imports_utils import notion
 from imports_utils import getDataFromNotion
-#from imports_utils import fetchDomainMapper
-#from imports_utils import fetchSubdomainMapper
-
 from imports_utils import notionToken
-
 from config import useNotionData
 from config import landuseDatabaseId , subdomainAttributesDatabaseId
 from config import landuseColumnName
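fetch_all_database_pages and get_property_value are helpers from imports_utils that this commit keeps but does not show. As a rough, hedged illustration of the paginated query such a helper typically wraps around notion_client (the cursor fields follow the public Notion API; the real helper's name and signature may differ):

from notion_client import Client as client_notion

def fetch_all_database_pages_sketch(notion, database_id):
    """Collect every page of a Notion database by following the pagination cursor."""
    pages, cursor = [], None
    while True:
        kwargs = {"start_cursor": cursor} if cursor else {}
        response = notion.databases.query(database_id=database_id, **kwargs)
        pages.extend(response["results"])
        if not response.get("has_more"):
            return pages
        cursor = response.get("next_cursor")

# usage sketch: notion = client_notion(auth=notionToken)
#               pages = fetch_all_database_pages_sketch(notion, landuseDatabaseId)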
@@ -57,13 +51,11 @@ from specklepy.api import operations
 from specklepy.objects.geometry import Polyline, Point
 from specklepy.objects import Base
 
-#import imports_utils
 import speckle_utils
 import data_utils
 
 from config import landuseDatabaseId , streamId, dmBranchName, dmCommitId, luBranchName, luCommitId, distanceMatrixActivityNodes
 from imports_utils import speckleToken
-#from imports_utils import fetchDistanceMatrices
 from config import useSpeckleData
 from imports_utils import getDataFromSpeckle
 
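getDataFromSpeckle is likewise defined in imports_utils and not shown in this diff. A minimal sketch of the commit fetch it presumably wraps, using the public specklepy client (the host URL and the helper name are assumptions; streamId, the commit ids and speckleToken come from config and imports_utils):

from specklepy.api.client import SpeckleClient
from specklepy.api import operations
from specklepy.transports.server import ServerTransport

def receive_commit_sketch(host, token, stream_id, commit_id):
    """Authenticate against a Speckle server and receive one commit's object tree."""
    client = SpeckleClient(host=host)
    client.authenticate_with_token(token)
    commit = client.commit.get(stream_id, commit_id)
    transport = ServerTransport(client=client, stream_id=stream_id)
    return operations.receive(commit.referencedObject, transport)

# usage sketch: base = receive_commit_sketch("https://speckle.xyz", speckleToken, streamId, dmCommitId)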
@@ -94,8 +86,7 @@ else:
     df_lu = df_lu.apply(pd.to_numeric, errors='coerce')
     df_lu = df_lu.astype(int)
     df_lu = df_lu.T.groupby(level=0).sum().T
-
-
+
 
 
 def test(input_json):
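The retained df_lu clean-up coerces every column to a number and then collapses duplicate column names by summing them; the transpose, groupby(level=0), transpose idiom is what does the column-wise grouping. A self-contained toy example (the land-use names and values are invented for illustration):

import pandas as pd

# toy land-use table with a duplicated column name
df_lu = pd.DataFrame([[1, "2", 3], [4, 5, 6]], columns=["residential", "retail", "retail"])

df_lu = df_lu.apply(pd.to_numeric, errors='coerce')  # strings become numbers, failures become NaN
df_lu = df_lu.astype(int)
df_lu = df_lu.T.groupby(level=0).sum().T             # columns sharing a name are summed

print(df_lu)  # a single 'retail' column holding [5, 11]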
@@ -112,14 +103,18 @@ def test(input_json):
     from imports_utils import getDataFromGrasshopper
     from config import alpha as alphaDefault
     from config import threshold as thresholdDefault
+
+    from imports_utils import findUniqueDomains
+    from imports_utils import findUniqueSubdomains
+    from imports_utils import landusesToSubdomains
+    from imports_utils import FindWorkplacesNumber
+    from imports_utils import computeAccessibility
+    from imports_utils import computeAccessibility_pointOfInterest
+    from imports_utils import remap
+    from imports_utils import accessibilityToLivability
 
     useGrasshopperData = inputs['input']["useGrasshopperData"] # fetch grasshoper data or not
-
-
-    if useGrasshopperData == "True": # grasshopper input
-
-        # fetch grasshoper data or not
-
+    if useGrasshopperData == "True":
         dfMatrix_gh, dfLanduses_gh, attributeMapperDict_gh, landuseMapperDict_gh, alpha, threshold = getDataFromGrasshopper(
             inputJson = inputs,
             inputNameMatrix = "matrix",
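The inputName* arguments mirror keys of the incoming JSON. Based on the keys visible elsewhere in this diff (useGrasshopperData, matrix, landuse_areas, attributeMapperDict, landuseMapperDict, threshold), a request body would look roughly like the dictionary below; the alpha key and all nested values are illustrative guesses, not taken from the repository:

example_input = {
    "input": {
        "useGrasshopperData": "True",  # string flag, compared against "True" above
        "matrix": {"node_0": {"node_0": 0, "node_1": 120}},    # travel distance/time matrix
        "landuse_areas": {"residential": {"node_0": 2500.0}},  # area per land use per node
        "attributeMapperDict": {},  # used when useNotionData != True
        "landuseMapperDict": {},
        "alpha": 0.0038,    # assumed key; otherwise the config default alphaDefault applies
        "threshold": 30,
    }
}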
@@ -136,27 +131,7 @@ def test(input_json):
         if useNotionData != True:
             attributeMapperDict = attributeMapperDict_gh
             landuseMapperDict = landuseMapperDict_gh
-
-
-        """
-        matrix = inputs['input']["matrix"]
-        landuses = inputs['input']["landuse_areas"]
-
-        dfLanduses = pd.DataFrame(landuses).T
-        dfLanduses = dfLanduses.apply(pd.to_numeric, errors='coerce')
-        dfLanduses = dfLanduses.replace([np.inf, -np.inf], 0).fillna(0) # cleaning function?
-        dfLanduses = dfLanduses.round(0).astype(int)
-
-        dfMatrix = pd.DataFrame(matrix).T
-        dfMatrix = dfMatrix.apply(pd.to_numeric, errors='coerce')
-        dfMatrix = dfMatrix.replace([np.inf, -np.inf], 10000).fillna(0)
-        dfMatrix = dfMatrix.round(0).astype(int)
-
-        attributeMapperDict_gh = inputs['input']["attributeMapperDict"]
-        landuseMapperDict_gh = inputs['input']["landuseMapperDict"] # if fetch notion data or not, def
-
-        """
-
+
     else:
 
         dfMatrix_gh, dfLanduses_gh, attributeMapperDict_gh, landuseMapperDict_gh, alpha, threshold = getDataFromGrasshopper(
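The block deleted above was a commented-out duplicate of the parsing that getDataFromGrasshopper now performs. For reference, the same logic in runnable form, taken directly from the removed lines (the real helper in imports_utils may differ in details):

import numpy as np
import pandas as pd

def parse_grasshopper_tables_sketch(inputs):
    """Rebuild the land-use and distance-matrix frames exactly as the removed inline code did."""
    landuses = inputs['input']["landuse_areas"]
    matrix = inputs['input']["matrix"]

    dfLanduses = pd.DataFrame(landuses).T
    dfLanduses = dfLanduses.apply(pd.to_numeric, errors='coerce')
    dfLanduses = dfLanduses.replace([np.inf, -np.inf], 0).fillna(0)
    dfLanduses = dfLanduses.round(0).astype(int)

    dfMatrix = pd.DataFrame(matrix).T
    dfMatrix = dfMatrix.apply(pd.to_numeric, errors='coerce')
    dfMatrix = dfMatrix.replace([np.inf, -np.inf], 10000).fillna(0)  # unreachable pairs become a large distance
    dfMatrix = dfMatrix.round(0).astype(int)

    return dfMatrix, dfLanduses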
@@ -169,53 +144,24 @@ def test(input_json):
             inputNameThreshold = "threshold"
         )
 
-        dfLanduses = df_lu.copy() # fetch speckl data or not
+        dfLanduses = df_lu.copy()
         dfMatrix = df_dm.copy()
-
-
+
 
     landuseMapperDict = lu_mapperDict
     livabilityMapperDict = subdomain_mapperDict
 
-    """
-    valid_indexes = [idx for idx in mask_connected if idx in dfLanduses.index]
-    # Identify and report missing indexes
-    missing_indexes = set(mask_connected) - set(valid_indexes)
-    if missing_indexes:
-        print(f"Error: The following indexes were not found in the DataFrame: {missing_indexes}, length: {len(missing_indexes)}")
-
-    # Apply the filtered mask
-    dfLanduses_filtered = dfLanduses.loc[valid_indexes]
-    """
-
-    from imports_utils import findUniqueDomains
-    from imports_utils import findUniqueSubdomains
-
-    from imports_utils import landusesToSubdomains
-    from imports_utils import FindWorkplacesNumber
-    from imports_utils import computeAccessibility
-    from imports_utils import computeAccessibility_pointOfInterest
-    from imports_utils import remap
-    from imports_utils import accessibilityToLivability
-
-
     domainsUnique = findUniqueDomains(livabilityMapperDict)
     subdomainsUnique = findUniqueSubdomains(landuseMapperDict)
 
     LivabilitySubdomainsWeights = landusesToSubdomains(dfMatrix,dfLanduses,landuseMapperDict,subdomainsUnique)
-
     WorkplacesNumber = FindWorkplacesNumber(dfMatrix,livabilityMapperDict,LivabilitySubdomainsWeights,subdomainsUnique)
 
     # prepare an input weights dataframe for the parameter LivabilitySubdomainsInputs
     LivabilitySubdomainsInputs =pd.concat([LivabilitySubdomainsWeights, WorkplacesNumber], axis=1)
-
     subdomainsAccessibility = computeAccessibility(dfMatrix,LivabilitySubdomainsInputs,alpha,threshold)
-    #artAccessibility = computeAccessibility_pointOfInterest(df_art_matrix,'ART',alpha,threshold)
-    #gmtAccessibility = computeAccessibility_pointOfInterest(df_gmt_matrix,'GMT+HSR',alpha,threshold)
-
-    #AccessibilityInputs = pd.concat([subdomainsAccessibility, artAccessibility,gmtAccessibility], axis=1)
-
     livability = accessibilityToLivability(dfMatrix,subdomainsAccessibility,livabilityMapperDict,domainsUnique)
+
 
     livability_dictionary = livability.to_dict('index')
     LivabilitySubdomainsInputs_dictionary = LivabilitySubdomainsInputs.to_dict('index')
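computeAccessibility and accessibilityToLivability are imported from imports_utils, so their internals are outside this diff. A heavily hedged sketch of the kind of computation the alpha and threshold parameters suggest (a negative-exponential distance decay cut off at the threshold; the actual helpers may weight, remap and normalise differently):

import numpy as np
import pandas as pd

def compute_accessibility_sketch(dfMatrix, weights, alpha, threshold):
    """Gravity-style accessibility: for every origin, sum destination weights
    discounted by exp(-alpha * distance), ignoring pairs beyond the threshold.
    This is an assumed formulation, not the code from imports_utils."""
    decay = np.exp(-alpha * dfMatrix.values)
    decay[dfMatrix.values > threshold] = 0.0
    # weights is expected to be indexed by the same nodes as dfMatrix's columns,
    # with one column per subdomain (e.g. LivabilitySubdomainsInputs)
    aligned = weights.loc[dfMatrix.columns].values
    return pd.DataFrame(decay @ aligned, index=dfMatrix.index, columns=weights.columns)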
 
8
 
9
  # ---------------------- Accessing data from Notion ---------------------- #
10
 
 
11
  from notion_client import Client as client_notion
12
  from imports_utils import fetch_all_database_pages
13
  from imports_utils import get_property_value
 
14
  from imports_utils import getDataFromNotion
 
 
 
15
  from imports_utils import notionToken
 
16
  from config import useNotionData
17
  from config import landuseDatabaseId , subdomainAttributesDatabaseId
18
  from config import landuseColumnName
 
51
  from specklepy.objects.geometry import Polyline, Point
52
  from specklepy.objects import Base
53
 
 
54
  import speckle_utils
55
  import data_utils
56
 
57
  from config import landuseDatabaseId , streamId, dmBranchName, dmCommitId, luBranchName, luCommitId, distanceMatrixActivityNodes
58
  from imports_utils import speckleToken
 
59
  from config import useSpeckleData
60
  from imports_utils import getDataFromSpeckle
61
 
 
86
  df_lu = df_lu.apply(pd.to_numeric, errors='coerce')
87
  df_lu = df_lu.astype(int)
88
  df_lu = df_lu.T.groupby(level=0).sum().T
89
+
 
90
 
91
 
92
  def test(input_json):
 
103
  from imports_utils import getDataFromGrasshopper
104
  from config import alpha as alphaDefault
105
  from config import threshold as thresholdDefault
106
+
107
+ from imports_utils import findUniqueDomains
108
+ from imports_utils import findUniqueSubdomains
109
+ from imports_utils import landusesToSubdomains
110
+ from imports_utils import FindWorkplacesNumber
111
+ from imports_utils import computeAccessibility
112
+ from imports_utils import computeAccessibility_pointOfInterest
113
+ from imports_utils import remap
114
+ from imports_utils import accessibilityToLivability
115
 
116
  useGrasshopperData = inputs['input']["useGrasshopperData"] # fetch grasshoper data or not
117
+ if useGrasshopperData == "True":
 
 
 
 
 
118
  dfMatrix_gh, dfLanduses_gh, attributeMapperDict_gh, landuseMapperDict_gh, alpha, threshold = getDataFromGrasshopper(
119
  inputJson = inputs,
120
  inputNameMatrix = "matrix",
 
131
  if useNotionData != True:
132
  attributeMapperDict = attributeMapperDict_gh
133
  landuseMapperDict = landuseMapperDict_gh
134
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
135
  else:
136
 
137
  dfMatrix_gh, dfLanduses_gh, attributeMapperDict_gh, landuseMapperDict_gh, alpha, threshold = getDataFromGrasshopper(
 
144
  inputNameThreshold = "threshold"
145
  )
146
 
147
+ dfLanduses = df_lu.copy()
148
  dfMatrix = df_dm.copy()
149
+
 
150
 
151
  landuseMapperDict = lu_mapperDict
152
  livabilityMapperDict = subdomain_mapperDict
153
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
154
  domainsUnique = findUniqueDomains(livabilityMapperDict)
155
  subdomainsUnique = findUniqueSubdomains(landuseMapperDict)
156
 
157
  LivabilitySubdomainsWeights = landusesToSubdomains(dfMatrix,dfLanduses,landuseMapperDict,subdomainsUnique)
 
158
  WorkplacesNumber = FindWorkplacesNumber(dfMatrix,livabilityMapperDict,LivabilitySubdomainsWeights,subdomainsUnique)
159
 
160
  # prepare an input weights dataframe for the parameter LivabilitySubdomainsInputs
161
  LivabilitySubdomainsInputs =pd.concat([LivabilitySubdomainsWeights, WorkplacesNumber], axis=1)
 
162
  subdomainsAccessibility = computeAccessibility(dfMatrix,LivabilitySubdomainsInputs,alpha,threshold)
 
 
 
 
 
163
  livability = accessibilityToLivability(dfMatrix,subdomainsAccessibility,livabilityMapperDict,domainsUnique)
164
+
165
 
166
  livability_dictionary = livability.to_dict('index')
167
  LivabilitySubdomainsInputs_dictionary = LivabilitySubdomainsInputs.to_dict('index')