import json
import re
import sys
import time
import copy
from notion_client import Client
from specklepy.api.client import SpeckleClient
from specklepy.api.credentials import get_default_account, get_local_accounts
from specklepy.transports.server import ServerTransport
from specklepy.api import operations
from specklepy.objects.geometry import Polyline, Point
from specklepy.objects import Base
import os
from functools import wraps
import gradio as gr
import requests
from huggingface_hub import webhook_endpoint, WebhookPayload
from fastapi import Request
import datetime
from utils import *
current_directory = os.path.dirname(os.path.abspath(__file__))
# Path to the config.json file
config_file_path = os.path.join(current_directory, "config.json")
with open(config_file_path, 'r') as f:
    config = json.load(f)
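
# Expected shape of config.json, inferred from how it is read in update_streams() below
# (all names and values here are placeholders, not real project data):
#
# {
#   "<stream name>": {
#     "<branch name>": {
#       "STREAM_NAME": "<stream name>",
#       "STREAM_ID": "<speckle stream id>",
#       "SOURCE_BRANCH": "<branch to read from>",
#       "TARGET_BRANCH": "<branch to commit to>",      (or a ["<search>", "<replace>"] pair
#                                                        applied to SOURCE_BRANCH, see mainFunc)
#       "UUID_COL": "<attribute holding the object uuid>",
#       "ATTR_METADATA": "<notion database id for attribute metadata>",
#       "KPI_METADATA": "<notion database id for kpi metadata>",
#       "DEFAULT_ATTRIBUTES": ["<attribute>", "..."]
#     }
#   }
# }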
# Notion and Speckle tokens (read from environment variables / Space secrets)
speckle_token = os.environ.get("SPECKLE_TOKEN")
notion_token = os.environ.get("NOTION_TOKEN")

# Notion client
notion = Client(auth=notion_token)

# Speckle client
CLIENT = SpeckleClient(host="https://speckle.xyz/")
CLIENT.authenticate_with_token(token=speckle_token)


def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES):
    # If TARGET_BRANCH is given as a [search, replace] pair, derive it from SOURCE_BRANCH
    if isinstance(TARGET_BRANCH, list):
        TARGET_BRANCH = SOURCE_BRANCH.replace(TARGET_BRANCH[0], TARGET_BRANCH[1])
    # Get attribute and KPI metadata from Notion
    database_full_pages = fetch_all_database_pages(notion, ATTR_METADATA)
    kpi_database_pages = fetch_all_database_pages(notion, KPI_METADATA)

    # Generate JSON metadata from the Notion tables
    attributeMetaData, availableAttributes = notionTable2JSON(database_full_pages, kpi_database_pages)

    # Extract attribute/column/feature names from the Notion table
    # (copy DEFAULT_ATTRIBUTES so the list from config is not mutated in place)
    attributesOfInterest = list(DEFAULT_ATTRIBUTES)
    for page in database_full_pages:
        pv = get_property_value(page, "name")
        attributesOfInterest.append(pv)
    if UUID_COL not in attributesOfInterest:
        attributesOfInterest.append(UUID_COL)
    # Get the Speckle stream (latest commit on the source branch)
    stream = getSpeckleStream(STREAM_ID, SOURCE_BRANCH, CLIENT, commit_id="")
    # Navigate to the list of Speckle objects of interest
    try:
        stream_data = stream["@Data"]["@{0}"]
    except Exception:
        print("something went wrong, try again with non-capital d")
        try:
            stream_data = stream["@data"]["@{0}"]
        except Exception:
            print("check on speckle.com how to access the data")
            return
    # ======== assemble new stream data ============
    streamData_new = []
    log = {"removedDatapoints": 0, "removedID": [], "avg_attrRemoved": 0, "removedAttr": []}
    for i, obj in enumerate(stream_data):
        objDict = obj.__dict__

        # REMOVE DATA POINTS ==============================
        # COND A: drop objects with no land use at all (sum of all "lu+" attributes is zero)
        pass_flag = True
        tot_sqm = 0
        for k, v in objDict.items():
            if k.startswith("lu+"):
                tot_sqm += float(v)
        if tot_sqm <= 0:
            pass_flag = False
        if not pass_flag:
            log["removedDatapoints"] += 1
            log["removedID"].append(objDict[UUID_COL])
            continue
        # REMOVE ATTRIBUTES ===============================
        # Keep only the attributes of interest on a fresh Base object
        datanew = Base()
        for k, v in objDict.items():
            if k in attributesOfInterest:
                datanew[k] = v
            else:
                log["avg_attrRemoved"] += 1
                log["removedAttr"].append(k)
        streamData_new.append(datanew)

    log["avg_attrRemoved"] = log["avg_attrRemoved"] / len(stream_data)
    log["removedAttr"] = list(set(log["removedAttr"]))
    # Copy the original stream object and swap in the filtered data
    stream_new = copy.deepcopy(stream)
    stream_new["@Data"]["@{0}"] = streamData_new

    # Add additional data to the stream
    stream_new["logs"] = json.dumps(log)
    stream_new["attributeMetaData"] = json.dumps(attributeMetaData)
    stream_new["availableAttributes"] = json.dumps(availableAttributes)
    # Get a transport for the target stream
    transport = ServerTransport(client=CLIENT, stream_id=STREAM_ID)

    # Send the data object to the Speckle stream
    object_id = operations.send(stream_new, [transport])

    # Create a new commit with the new object on the target branch
    commit_id = CLIENT.commit.create(
        STREAM_ID,
        object_id=object_id,
        message="auto commit, removed datapoints: " + str(log["removedDatapoints"])
                + ", avg. removed attributes: " + str(log["avg_attrRemoved"]),
        branch_name=TARGET_BRANCH,
    )
    print(commit_id)
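
# Example of calling mainFunc directly (all values are placeholders for illustration only;
# in normal operation the arguments come from config.json via the webhook below):
#
# mainFunc(
#     STREAM_ID="abc1234567",
#     SOURCE_BRANCH="main",
#     TARGET_BRANCH=["main", "filtered"],   # or a plain branch name string
#     UUID_COL="uuid",
#     ATTR_METADATA="<notion database id>",
#     KPI_METADATA="<notion database id>",
#     DEFAULT_ATTRIBUTES=["speckle_type", "id"],
# )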

@webhook_endpoint
async def update_streams(request: Request):
    # Flag that controls whether the main processing should run
    should_continue = False

    # Read the request body as JSON
    payload = await request.json()
    print("============= payload =============")
    print(payload)
    print("============= config =============")
    print(config)

    payload = payload["payload"]
    # Check that the payload structure matches the expected format;
    # payload["event"]["event_name"] gives the event type
    event_name = payload["event"]["event_name"]
    streamName = payload.get("stream", {}).get("name")

    # For commit_update events the branch name sits in the "old" commit data,
    # otherwise it is part of the "commit" data
    if event_name == "commit_update":
        branchName = payload.get("event", {}).get("data", {}).get("old", {}).get("branchName")
    else:
        branchName = payload.get("event", {}).get("data", {}).get("commit", {}).get("branchName")
    # Only react to commit events on streams/branches listed in config.json
    valid_event_types = ["commit_create", "commit_delete", "commit_update"]
    if event_name in valid_event_types:
        if streamName in config:
            if branchName in config[streamName]:
                should_continue = True
            else:
                print(f"Branch name {branchName} not found in config for stream {streamName}.")
        else:
            print(f"Stream name {streamName} not found in config.")
    else:
        print(f"Event type {event_name} is not one of the specified types.")
    # If the event matched a configured stream/branch, load its config and run the main routine
    if should_continue:
        config_entry = config[streamName][branchName]
        STREAM_NAME = config_entry["STREAM_NAME"]
        STREAM_ID = config_entry["STREAM_ID"]
        SOURCE_BRANCH = config_entry["SOURCE_BRANCH"]
        TARGET_BRANCH = config_entry["TARGET_BRANCH"]
        UUID_COL = config_entry["UUID_COL"]
        ATTR_METADATA = config_entry["ATTR_METADATA"]
        KPI_METADATA = config_entry["KPI_METADATA"]
        DEFAULT_ATTRIBUTES = config_entry["DEFAULT_ATTRIBUTES"]

        mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES)
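
# Note: huggingface_hub's @webhook_endpoint decorator registers this coroutine on a
# WebhooksServer, so when the Space is running it should be exposed as a POST endpoint
# (by default under /webhooks/update_streams) that Speckle's webhook can call; the exact
# routing and launch behaviour depend on the huggingface_hub version in use.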