row filter conditions can be separated for source branches
app.py
CHANGED
@@ -43,6 +43,25 @@ notion = Client(auth=notion_token)
 CLIENT = SpeckleClient(host="https://speckle.xyz/")
 CLIENT.authenticate_with_token(token=speckle_token)
 
+
+def filter_activityNodeAnalysis(objDict):
+    flag = True
+    tot_sqm =0
+    for k, v in objDict.items():
+        if k.startswith("lu+"):
+            tot_sqm += float(v)
+    if tot_sqm <= 0:
+        pass_flag = False
+
+    # COND B: remove non connected
+    if "isConnected" in objDict.keys():
+        if objDict["isConnected"] == False:
+            flag == False
+    else:
+        flag == False
+    return flag
+
+
 # load config to variables
 def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES, DATASET_NAME):
     print("SOURCE_BRANCH",SOURCE_BRANCH)
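Note on the helper added above: `pass_flag = False` writes to a name that is never read inside the function, and `flag == False` is an equality test rather than an assignment, so `filter_activityNodeAnalysis` as committed always returns True. A corrected sketch (hypothetical, not part of this commit) would use a single `flag` variable and plain assignments:

# Hypothetical corrected version of the helper (illustrative only, not what was committed)
def filter_activityNodeAnalysis(objDict):
    flag = True

    # COND A: drop the datapoint when it has no land-use ("lu+") areas at all
    tot_sqm = 0
    for k, v in objDict.items():
        if k.startswith("lu+"):
            tot_sqm += float(v)
    if tot_sqm <= 0:
        flag = False

    # COND B: drop datapoints that are not connected, or that lack the attribute
    if "isConnected" in objDict:
        if objDict["isConnected"] == False:
            flag = False
    else:
        flag = False

    return flag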
@@ -89,7 +108,6 @@ def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES, DATASET_NAME):
 
 
     # ======== assamble new stream data ============
-
     streamData_new = []
     log = {"removedDatapoints":0,"removedID":[], "avg_attrRemoved":0, "removedAttr":[]}
     for i, obj in enumerate(stream_data):
@@ -98,6 +116,13 @@ def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES, DATASET_NAME):
         # COND A: no landuses at all.
         pass_flag = True
 
+        if "activity_node_analysis" in SOURCE_BRANCH:
+            pass_flag = filter_activityNodeAnalysis(objDict)
+        elif "isovist_analysis" in SOURCE_BRANCH:
+            # to filter conditions defined yet
+            pass
+
+        """
         tot_sqm =0
         for k, v in objDict.items():
             if k.startswith("lu+"):
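With this hunk, the row filter applied inside `mainFunc` now depends on which substring appears in SOURCE_BRANCH, and the old inline land-use check is fenced off by the `"""` block opened here. If more analysis branches get their own conditions later, a small registry keeps the dispatch in one place; the following is only a sketch of that pattern, not what the commit does (the `isovist_analysis` entry is hypothetical, since no filter exists for it yet):

# Sketch: map branch-name substrings to row-filter callables (not part of this commit)
BRANCH_FILTERS = {
    "activity_node_analysis": filter_activityNodeAnalysis,
    # "isovist_analysis": filter_isovistAnalysis,  # hypothetical, no conditions defined yet
}

def passes_branch_filter(objDict, source_branch):
    for key, branch_filter in BRANCH_FILTERS.items():
        if key in source_branch:
            return branch_filter(objDict)
    return True  # branches without a registered filter keep every row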
@@ -111,8 +136,9 @@ def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES, DATASET_NAME):
                 pass_flag == False
         else:
             pass_flag == False
-
 
+        """
+
         if pass_flag == False :
             log["removedDatapoints"] +=1
             log["removedID"].append(objDict[UUID_COL])
@@ -128,8 +154,6 @@ def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES, DATASET_NAME):
                 log["removedAttr"].append(k)
 
         datanew["dataset"] = DATASET_NAME
-
-
         streamData_new.append(datanew)
 
     log["avg_attrRemoved"] = log["avg_attrRemoved"]/len(stream_data)
@@ -168,8 +192,8 @@ def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES, DATASET_NAME):
 @webhook_endpoint
 async def update_streams(request: Request):
     # Initialize flag
+
     should_continue = False
-
     finalBranchName = None
 
     # Read the request body as JSON
@@ -181,12 +205,13 @@ async def update_streams(request: Request):
     print(config)
     print("============= rest ==============")
 
-
     payload = payload["payload"]
+
     # Check if the payload structure matches the expected format
     # Assuming payload["event"]["event_name"] gives you the event type
     event_name = payload["event"]["event_name"]
     streamName = payload.get("stream", {}).get("name")
+
     # Extract branchName for commit_update events from the "old" commit data
     if event_name == "commit_update":
         branchName = payload.get("event", {}).get("data", {}).get("old", {}).get("branchName")
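For orientation, the accessors in this hunk imply a webhook body roughly shaped like the example below; the concrete values are placeholders, not taken from a real Speckle event:

# Illustrative body only; field names follow the accessors above, values are made up
example_body = {
    "payload": {
        "event": {
            "event_name": "commit_update",
            "data": {"old": {"branchName": "activity_node_analysis"}},
        },
        "stream": {"name": "my_stream"},
    }
}

payload = example_body["payload"]
event_name = payload["event"]["event_name"]                 # "commit_update"
streamName = payload.get("stream", {}).get("name")          # "my_stream"
branchName = payload.get("event", {}).get("data", {}).get("old", {}).get("branchName")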
@@ -201,10 +226,7 @@ async def update_streams(request: Request):
     if event_name in valid_event_types:
         if streamName in config:
             if branchName in config[streamName]:
-                print("found branchName", branchName)
-                finalBranchName = copy.deepcopy(branchName)
                 stream_config = config[streamName][branchName]
-                print(".----> first config subset", stream_config )
                 should_continue = True
             else:
                 print(f"Branch name {branchName} not found in config for stream {streamName}.")
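The `config[streamName][branchName]` lookup expects a nested mapping keyed first by stream name and then by branch name. A minimal entry could look like the sketch below; only STREAM_NAME and STREAM_ID are visible as keys in this diff, and the remaining keys are assumed from `mainFunc`'s parameters:

# Illustrative config shape (stream and branch names are placeholders)
config = {
    "my_stream": {                              # streamName from the webhook payload
        "activity_node_analysis": {             # branchName from the commit event
            "STREAM_NAME": "my_stream",
            "STREAM_ID": "abc123def4",
            # "SOURCE_BRANCH": "activity_node_analysis",  # assumed, mirrors mainFunc args
            # "TARGET_BRANCH": "filtered",                 # assumed
        },
    },
}

stream_config = config["my_stream"]["activity_node_analysis"]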
@@ -216,7 +238,7 @@ async def update_streams(request: Request):
     # If the flag is True, continue running the main part of the code
     if should_continue:
         print("is continuing with branch name: ", branchName, "fin branchname", finalBranchName)
-        config_entry = config[streamName][
+        config_entry = config[streamName][branchName]
         print("--> selected config subset", config_entry)
         STREAM_NAME = config_entry["STREAM_NAME"]
         STREAM_ID = config_entry["STREAM_ID"]