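"""Hugging Face Space webhook service that syncs Speckle streams with Notion metadata.

On a Speckle commit event or a Notion trigger, it pulls the source branch,
removes data points and attributes according to the Notion metadata tables,
and commits the cleaned data set to a target branch.
"""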
import copy
import datetime
import json
import os
import re
import sys
import time
from functools import wraps

import gradio as gr
import requests
from fastapi import Request
from huggingface_hub import webhook_endpoint, WebhookPayload
from notion_client import Client
from specklepy.api import operations
from specklepy.api.client import SpeckleClient
from specklepy.api.credentials import get_default_account, get_local_accounts
from specklepy.objects import Base
from specklepy.objects.geometry import Polyline, Point
from specklepy.transports.server import ServerTransport

from utils import *

current_directory = os.path.dirname(os.path.abspath(__file__))
# Path to the config.json file
config_file_path = os.path.join(current_directory, "config.json")
with open(config_file_path, "r") as f:
    config = json.load(f)
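
# Expected shape of config.json (a sketch inferred from how the keys are read
# in update_streams() below; the values here are illustrative, not real IDs):
#
# {
#   "<streamName>": {
#     "<branchName>": {
#       "STREAM_NAME": "<streamName>",
#       "STREAM_ID": "<speckle stream id>",
#       "SOURCE_BRANCH": "<branch to read>",
#       "TARGET_BRANCH": "<branch to write>"  (or a ["find", "replace"] pair),
#       "UUID_COL": "<unique-id attribute name>",
#       "ATTR_METADATA": "<notion database id>",
#       "KPI_METADATA": "<notion database id>",
#       "DEFAULT_ATTRIBUTES": ["<attribute>", ...],
#       "DATASET_NAME": "<label written to each object>"
#     }
#   }
# }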

# credentials
speckle_token = os.environ.get("SPECKLE_TOKEN")
notion_token = os.environ.get("NOTION_TOKEN")

# notion client
notion = Client(auth=notion_token)

# speckle client
CLIENT = SpeckleClient(host="https://speckle.xyz/")
CLIENT.authenticate_with_token(token=speckle_token)

def filter_activityNodeAnalysis(objDict):
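    """Return True if the object passes the activity-node filters:
    (A) it has a positive total land-use area ("lu+" attributes) and
    (B) it is flagged as connected ("isConnected")."""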
    flag = True

    # COND A: remove objects with no land use at all
    tot_sqm = 0
    for k, v in objDict.items():
        if k.startswith("lu+"):
            tot_sqm += float(v)
    if tot_sqm <= 0:
        flag = False

    # COND B: remove non-connected objects (a missing flag counts as not connected)
    if not objDict.get("isConnected", False):
        flag = False

    return flag



# main processing: pull Notion metadata and Speckle data, filter, and re-commit
def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES, DATASET_NAME, UPDATE_SRC):
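    """Fetch SOURCE_BRANCH from the Speckle stream, filter data points and
    attributes according to the Notion metadata tables, and commit the
    cleaned data to TARGET_BRANCH. Returns the URL of the new commit."""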
    print("SOURCE_BRANCH",SOURCE_BRANCH)
    print("TARGET_BRANCH",TARGET_BRANCH)
    if type(TARGET_BRANCH) == type([]):
        TARGET_BRANCH = SOURCE_BRANCH.replace(TARGET_BRANCH[0], TARGET_BRANCH[1])
    print("TARGET_BRANCH Final",TARGET_BRANCH)
    
    # get data from Notion
    database_full_pages = fetch_all_database_pages(notion, ATTR_METADATA)
    time.sleep(1)
    kpi_database_pages = fetch_all_database_pages(notion, KPI_METADATA)
    time.sleep(1)

    # generate JSON metadata from the Notion tables
    attributeMetaData, availableAttributes = notionTable2JSON(database_full_pages, kpi_database_pages)

    # extract attribute/column/feature names from the Notion table
    attributesOfInterest = list(DEFAULT_ATTRIBUTES)  # copy, so the shared config list is not mutated
    attributesOfInterestDetails = {}
    for page in database_full_pages:
        pv = get_property_value(page, "name")
        isUsed = get_property_value(page, "isUsed")
        # can be used for filtering
        attributesOfInterestDetails[pv] = {"isUsed": isUsed}
        attributesOfInterest.append(pv)
    if UUID_COL not in attributesOfInterest:
        attributesOfInterest.append(UUID_COL)

    # get Speckle data
    print("getting source branch:", SOURCE_BRANCH)
    stream, inputCommitID = getSpeckleStream(STREAM_ID,
                                             SOURCE_BRANCH,
                                             CLIENT,
                                             commit_id="")
    time.sleep(2)

    # navigate to the list of Speckle objects of interest; the container key
    # may be "@Data" or "@data" depending on how the commit was created
    data_key = "@Data"
    try:
        stream_data = stream["@Data"]["@{0}"]
    except (KeyError, AttributeError):
        try:
            data_key = "@data"
            stream_data = stream["@data"]["@{0}"]
        except (KeyError, AttributeError):
            raise KeyError("could not find '@Data' or '@data' in the commit; "
                           "check on speckle.com how the data is structured")

    # ======== assemble new stream data ============
    streamData_new = []
    log = {"removedDatapoints": 0, "removedID": [], "avg_attrRemoved": 0, "removedAttr": []}
    for i, obj in enumerate(stream_data):
        objDict = obj.__dict__

        # REMOVE DATA POINTS ==============================
        pass_flag = True
        if "activity_node_analysis" in SOURCE_BRANCH:
            pass_flag = filter_activityNodeAnalysis(objDict)
        elif "isovist_analysis" in SOURCE_BRANCH:
            # no filter conditions defined yet
            pass

        if not pass_flag:
            log["removedDatapoints"] += 1
            log["removedID"].append(objDict[UUID_COL])
            continue

        # REMOVE ATTRIBUTES ===============================
        datanew = Base()
        for k, v in objDict.items():
            if k in attributesOfInterest:
                AoIDetailsFlag = True
                try:
                    # default to including the property
                    isUsedFlag = attributesOfInterestDetails[k].get("isUsed", True)
                    if isUsedFlag in (False, "False"):
                        AoIDetailsFlag = False
                except KeyError as e:  # attribute not described in the Notion table
                    print(f"AoIDetails test failed for key {k}: {e}")

                if AoIDetailsFlag:
                    datanew[k] = v
            else:
                log["avg_attrRemoved"] += 1
                log["removedAttr"].append(k)

        datanew["dataset"] = DATASET_NAME
        streamData_new.append(datanew)

    log["avg_attrRemoved"] = log["avg_attrRemoved"]/len(stream_data)
    log["removedAttr"] = list(set(log["removedAttr"]))

    stream_new = copy.deepcopy(stream)
    stream_new["@Data"]["@{0}"] = streamData_new
    
    #add additional data to stream
    stream_new["logs"] = json.dumps(log)
    stream_new["attributeMetaData"] = json.dumps(attributeMetaData)
    stream_new["availableAttributes"] = json.dumps(availableAttributes)
    stream_new["datasetMetadata"] = DATASET_NAME


    # get a transport for the stream
    transport = ServerTransport(client=CLIENT, stream_id=STREAM_ID)

    # send the data object to the Speckle stream
    object_id = operations.send(stream_new, [transport])
    time.sleep(1)

    # create a new commit with the new object
    print("----> attempting to make commit to:", TARGET_BRANCH)
    commit_id = CLIENT.commit.create(
        STREAM_ID,
        object_id=object_id,
        message=("auto commit from HF; Triggered by: " + UPDATE_SRC
                 + ", removed datapoints: " + str(log["removedDatapoints"])
                 + "; avg. removed attributes: " + str(int(log["avg_attrRemoved"]))
                 + "#+SourceCommit: " + inputCommitID),
        branch_name=TARGET_BRANCH,
    )

    print(commit_id)

    return "https://speckle.xyz/streams/" + STREAM_ID + "/commits/" + commit_id


@webhook_endpoint
async def update_streams(request: Request):
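    """Webhook handler. Accepts either a custom 'notionTrigger' payload or a
    Speckle webhook event (commit_create/commit_delete/commit_update), looks
    up the stream/branch pair in config.json, and runs mainFunc on a match."""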
    # initialize flag and result
    should_continue = False
    commit_url = None

    # Read the request body as JSON
    payload = await request.json()

    print("============= payload =============")
    print(payload)
    print("============= config ==============")
    print(config)
    print("===================================")

    payload = payload["payload"]

    # webhook calls can come from different sources
    if payload.get('source') == 'notionTrigger':
        update_source = "notionTrigger"
        streamName = payload.get('streamName')
        branchName = payload.get('branchName')

        # only continue if the stream/branch pair is configured
        if streamName in config and branchName in config[streamName]:
            should_continue = True
        else:
            print(f"Stream {streamName} / branch {branchName} not found in config.")

    else:
        # assume a Speckle webhook payload: event type under payload["event"]["event_name"]
        update_source = "SpeckleWebhook"
        event_name = payload["event"]["event_name"]
        streamName = payload.get("stream", {}).get("name")

        # for commit_update events the branch name sits in the "old" commit data
        if event_name == "commit_update":
            branchName = payload.get("event", {}).get("data", {}).get("old", {}).get("branchName")
        else:
            branchName = payload.get("event", {}).get("data", {}).get("commit", {}).get("branchName")

        # only react to commit events on configured stream/branch pairs
        valid_event_types = ["commit_create", "commit_delete", "commit_update"]
        if event_name in valid_event_types:
            if streamName in config:
                if branchName in config[streamName]:
                    should_continue = True
                else:
                    print(f"Branch name {branchName} not found in config for stream {streamName}.")
            else:
                print(f"Stream name {streamName} not found in config.")
        else:
            print(f"Event type {event_name} is not one of the specified types.")

    print("payload branchname", branchName)
    
    # if the flag is True, run the main part of the code
    if should_continue:
        config_entry = config[streamName][branchName]
        STREAM_NAME = config_entry["STREAM_NAME"]
        STREAM_ID = config_entry["STREAM_ID"]
        SOURCE_BRANCH = config_entry["SOURCE_BRANCH"]
        TARGET_BRANCH = config_entry["TARGET_BRANCH"]
        UUID_COL = config_entry["UUID_COL"]
        ATTR_METADATA = config_entry["ATTR_METADATA"]
        KPI_METADATA = config_entry["KPI_METADATA"]
        DEFAULT_ATTRIBUTES = config_entry["DEFAULT_ATTRIBUTES"]
        DATASET_NAME = config_entry.get("DATASET_NAME")

        # main
        commit_url = mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL,
                              ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES,
                              DATASET_NAME, update_source)

    # commit_url stays None when nothing was processed
    return commit_url
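

# A minimal local-test sketch (commented out so it does not run in the Space;
# values come from whatever config.json defines). It bypasses the webhook and
# calls mainFunc directly on the first configured stream/branch entry:
#
# if __name__ == "__main__":
#     entry = next(iter(next(iter(config.values())).values()))
#     print(mainFunc(entry["STREAM_ID"], entry["SOURCE_BRANCH"],
#                    entry["TARGET_BRANCH"], entry["UUID_COL"],
#                    entry["ATTR_METADATA"], entry["KPI_METADATA"],
#                    entry["DEFAULT_ATTRIBUTES"], entry.get("DATASET_NAME"),
#                    "localTest"))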