serJD committed on
Commit 0b53d44 · verified · 1 Parent(s): d14b753

Update app.py

Files changed (1)
  1. app.py +13 -16
app.py CHANGED
@@ -31,10 +31,6 @@ config_file_path = os.path.join(current_directory, "config.json")
 with open(config_file_path, 'r') as f:
     config = json.load(f)
 
-
-
-
-
 # notion login
 speckle_token = os.environ.get("SPECKLE_TOKEN")
 notion_token = os.environ.get("NOTION_TOKEN")
@@ -47,10 +43,7 @@ CLIENT = SpeckleClient(host="https://speckle.xyz/")
 CLIENT.authenticate_with_token(token=speckle_token)
 
 # load config to variables
-
-
-
-def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES, MAINDATA_KEY):
+def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES, DATASET_NAME):
 
     if type(TARGET_BRANCH) == type([]):
         TARGET_BRANCH = SOURCE_BRANCH.replace(TARGET_BRANCH[0], TARGET_BRANCH[1])
@@ -123,15 +116,18 @@ def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, K
                 log["avg_attrRemoved"] +=1
                 log["removedAttr"].append(k)
 
-
+        for obj in datanew.copy():
+            datanew["dataset":DATASET_NAME]
+
+
         streamData_new.append(datanew)
 
     log["avg_attrRemoved"] = log["avg_attrRemoved"]/len(stream_data)
     log["removedAttr"] = list(set(log["removedAttr"]))
 
     stream_new = copy.deepcopy(stream)
-
-    stream_new["@Data"][MAINDATA_KEY] = streamData_new
+    stream_new["@Data"]["@{0}"] = streamData_new
+
     #add additional data to stream
     stream_new["logs"] = json.dumps(log)
     stream_new["attributeMetaData"] = json.dumps(attributeMetaData)
@@ -141,6 +137,7 @@ def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, K
     # set stream and branch
     # Get transport
     transport = ServerTransport(client=CLIENT, stream_id=STREAM_ID)
+
     # Send the data object to the speckle stream
    object_id = operations.send(stream_new, [transport])
    time.sleep(2)
@@ -165,9 +162,9 @@ async def update_streams(request: Request):
 
     print("============= payload =============")
     print(payload)
-    print("============= payload =============")
+    print("============= config ==============")
     print(config)
-    print("============= config =============")
+
 
     payload = payload["payload"]
     # Check if the payload structure matches the expected format
@@ -198,7 +195,6 @@ async def update_streams(request: Request):
         # If the flag is True, continue running the main part of the code
         if should_continue:
             config_entry = config[streamName][branchName]
-
             STREAM_NAME = config_entry["STREAM_NAME"]
             STREAM_ID = config_entry["STREAM_ID"]
             SOURCE_BRANCH = config_entry["SOURCE_BRANCH"]
@@ -207,6 +203,7 @@ async def update_streams(request: Request):
             ATTR_METADATA = config_entry["ATTR_METADATA"]
             KPI_METADATA = config_entry["KPI_METADATA"]
             DEFAULT_ATTRIBUTES = config_entry["DEFAULT_ATTRIBUTES"]
-            MAINDATA_KEY = config_entry.get("MAINDATA_KEY", "@{0}")
+            DATASET_NAME = config_entry.get("DATASET_NAME")
+
             # main
-            mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES,MAINDATA_KEY)
+            mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES,DATASET_NAME)
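
Note on the new tagging block: the added line `datanew["dataset":DATASET_NAME]` indexes with a slice, which, if datanew is a plain dict, raises TypeError: unhashable type: 'slice' at runtime; the surrounding diff suggests the intent is to stamp each cleaned record with the dataset name read from config. A minimal, self-contained sketch of that assumed intent follows (the sample data and the assignment form are assumptions, not part of the commit):

    # Hedged sketch, not the committed code: assumes each record is a plain dict
    # and that the committed slice index was meant as a key assignment.
    DATASET_NAME = "exampleDataset"                    # hypothetical; app.py reads it via config_entry.get("DATASET_NAME")
    streamData_new = [{"uuid": "a1"}, {"uuid": "b2"}]  # hypothetical stand-in for the cleaned records

    for datanew in streamData_new:
        datanew["dataset"] = DATASET_NAME              # tag each record with its dataset name

    print(streamData_new)
    # [{'uuid': 'a1', 'dataset': 'exampleDataset'}, {'uuid': 'b2', 'dataset': 'exampleDataset'}]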