File size: 7,013 Bytes
5bf66f9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ad0b5fe
5bf66f9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
744d282
 
5bf66f9
 
 
a3c0add
 
74638a0
0a2ae78
 
 
 
 
a3c0add
 
 
 
 
 
 
 
 
5bf66f9
a3c0add
5bf66f9
a3c0add
5bf66f9
a3c0add
5bf66f9
 
 
f3a5171
 
 
 
 
 
 
 
 
 
5bf66f9
a3c0add
5bf66f9
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
import json
import re
import sys
import time
import copy
from notion_client import Client

from specklepy.api.client import SpeckleClient
from specklepy.api.credentials import get_default_account, get_local_accounts
from specklepy.transports.server import ServerTransport
from specklepy.api import operations
from specklepy.objects.geometry import Polyline, Point
from specklepy.objects import Base
import os
from functools import wraps

import gradio as gr
import requests
from huggingface_hub import webhook_endpoint, WebhookPayload
from fastapi import Request
import datetime

import gradio as gr


from utils import *

# --- configuration -----------------------------------------------------------
current_directory = os.path.dirname(os.path.abspath(__file__))
# Path to the config.json file (maps stream name -> branch name -> job settings,
# consumed by update_streams / mainFunc).
config_file_path = os.path.join(current_directory, "config.json")
with open(config_file_path, 'r') as f:
    config = json.load(f)


# --- API clients --------------------------------------------------------------
# Tokens are injected through environment variables (deployment secrets).
# NOTE(review): if either variable is unset these are None and the client
# constructors below will fail at first use — confirm secrets are configured.
speckle_token = os.environ.get("SPECKLE_TOKEN")
notion_token = os.environ.get("NOTION_TOKEN")

# Notion client (the original had a redundant `notion_token = notion_token`
# self-assignment here; removed)
notion = Client(auth=notion_token)

# Speckle client
CLIENT = SpeckleClient(host="https://speckle.xyz/")
CLIENT.authenticate_with_token(token=speckle_token)


def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES):
    """Filter a Speckle stream against Notion metadata and commit the result.

    Pulls attribute metadata from two Notion databases, fetches the latest
    commit on ``SOURCE_BRANCH`` of ``STREAM_ID``, removes data points with no
    land-use area and strips attributes not whitelisted in Notion, then sends
    the reduced object tree as a new commit on ``TARGET_BRANCH``.

    Args:
        STREAM_ID: Speckle stream id to read from and write to.
        SOURCE_BRANCH: branch whose latest commit is filtered.
        TARGET_BRANCH: target branch name, or a two-element list
            ``[old, new]`` — then the target is derived by replacing ``old``
            with ``new`` in ``SOURCE_BRANCH``.
        UUID_COL: attribute name used as the object's unique id (always kept).
        ATTR_METADATA / KPI_METADATA: Notion database ids for attribute and
            KPI metadata.
        DEFAULT_ATTRIBUTES: attribute names to keep in addition to those
            listed in Notion (not mutated).

    Returns:
        None. Side effect: creates a commit on ``TARGET_BRANCH`` and prints
        its id (or an error string on failure).
    """
    # TARGET_BRANCH given as [search, replace] -> derive it from SOURCE_BRANCH.
    if isinstance(TARGET_BRANCH, list):
        TARGET_BRANCH = SOURCE_BRANCH.replace(TARGET_BRANCH[0], TARGET_BRANCH[1])

    # get data from notion
    databaseFUll_pages = fetch_all_database_pages(notion, ATTR_METADATA)
    kpi_database_pages = fetch_all_database_pages(notion, KPI_METADATA)

    # generate JSON files
    attributeMetaData, availableAttributes = notionTable2JSON(databaseFUll_pages, kpi_database_pages)

    # Extract attribute/column/feature names from the Notion table.
    # Copy DEFAULT_ATTRIBUTES so the caller's list (config) is never mutated
    # across repeated webhook invocations.
    attributesOfInterest = list(DEFAULT_ATTRIBUTES)
    for page in databaseFUll_pages:
        attributesOfInterest.append(get_property_value(page, "name"))
    if UUID_COL not in attributesOfInterest:
        attributesOfInterest.append(UUID_COL)

    # get latest commit of the source branch from speckle
    stream = getSpeckleStream(STREAM_ID,
                              SOURCE_BRANCH,
                              CLIENT,
                              commit_id="")

    # Navigate to the list of speckle objects of interest. The detach key
    # casing varies between streams ("@Data" vs "@data"), so fall back to the
    # lower-case variant only if the capitalised one is missing. (The original
    # ran both lookups unconditionally and could leave stream_data unbound.)
    try:
        stream_data = stream["@Data"]["@{0}"]
    except (KeyError, AttributeError):
        try:
            stream_data = stream["@data"]["@{0}"]
        except (KeyError, AttributeError):
            print("check on speckle.com how to access the data")
            return

    # ======== assemble new stream data ============
    streamData_new = []
    log = {"removedDatapoints": 0, "removedID": [], "avg_attrRemoved": 0, "removedAttr": []}
    for obj in stream_data:
        objDict = obj.__dict__

        # REMOVE DATA POINTS ==============================
        # COND A: drop objects with no land use ("lu+"-prefixed) area at all.
        tot_sqm = 0
        for k, v in objDict.items():
            if k.startswith("lu+"):
                tot_sqm += float(v)
        if tot_sqm <= 0:
            log["removedDatapoints"] += 1
            log["removedID"].append(objDict[UUID_COL])
            continue

        # REMOVE ATTRIBUTES ===============================
        # Keep only whitelisted attributes; count/record everything removed.
        datanew = Base()
        for k, v in objDict.items():
            if k in attributesOfInterest:
                datanew[k] = v
            else:
                log["avg_attrRemoved"] += 1
                log["removedAttr"].append(k)

        streamData_new.append(datanew)

    # Finalize the log AFTER the loop. (The original had this and everything
    # below indented inside the loop, so the average was divided repeatedly
    # and a deepcopy + send + commit happened once per object.)
    if stream_data:
        log["avg_attrRemoved"] = log["avg_attrRemoved"] / len(stream_data)
    log["removedAttr"] = list(set(log["removedAttr"]))

    stream_new = copy.deepcopy(stream)
    stream_new["@Data"]["@{0}"] = streamData_new
    # add additional data to stream
    log = json.dumps(log)
    stream_new["logs"] = log
    stream_new["attributeMetaData"] = json.dumps(attributeMetaData)
    stream_new["availableAttributes"] = json.dumps(availableAttributes)

    # Get transport and send the data object to the speckle stream
    transport = ServerTransport(client=CLIENT, stream_id=STREAM_ID)
    object_id = operations.send(stream_new, [transport])

    # Create a single new commit with the new object
    log = json.loads(log)
    commit_id = CLIENT.commit.create(
        STREAM_ID,
        object_id=object_id,
        message="auto commit, removed datapoints: " + str(log["removedDatapoints"]) + "avg. removed attributes: " + str(log["avg_attrRemoved"]),
        branch_name=TARGET_BRANCH,
    )

    print(commit_id)


@webhook_endpoint
async def update_streams(request: Request):
    """Speckle webhook endpoint: re-run the filtering job on branch changes.

    Expects a Speckle webhook payload. If the event is a commit
    create/delete/update on a stream+branch pair present in ``config``, the
    matching job settings are read and :func:`mainFunc` is invoked; otherwise
    the reason for skipping is printed and nothing happens.

    Args:
        request: incoming FastAPI request carrying the webhook JSON body.

    Returns:
        None (fire-and-forget handler; all output goes to stdout/Speckle).
    """
    # Read the request body as JSON
    payload = await request.json()

    print("============= payload =============")
    print(payload)
    print("============= payload =============")
    print(config)
    print("============= config =============")

    payload = payload["payload"]
    # payload["event"]["event_name"] gives the event type
    event_name = payload["event"]["event_name"]
    streamName = payload.get("stream", {}).get("name")
    # For commit_update events the branch name lives in the "old" commit data
    if event_name == "commit_update":
        branchName = payload.get("event", {}).get("data", {}).get("old", {}).get("branchName")
    else:
        branchName = payload.get("event", {}).get("data", {}).get("commit", {}).get("branchName")

    # Guard clauses instead of the original nested ifs + should_continue flag.
    valid_event_types = ["commit_create", "commit_delete", "commit_update"]
    if event_name not in valid_event_types:
        print(f"Event type {event_name} is not one of the specified types.")
        return
    if streamName not in config:
        print(f"Stream name {streamName} not found in config.")
        return
    if branchName not in config[streamName]:
        print(f"Branch name {branchName} not found in config for stream {streamName}.")
        return

    # Single config lookup (the original fetched this twice and also read an
    # unused STREAM_NAME entry).
    config_entry = config[streamName][branchName]

    # main
    mainFunc(
        config_entry["STREAM_ID"],
        config_entry["SOURCE_BRANCH"],
        config_entry["TARGET_BRANCH"],
        config_entry["UUID_COL"],
        config_entry["ATTR_METADATA"],
        config_entry["KPI_METADATA"],
        config_entry["DEFAULT_ATTRIBUTES"],
    )