Spaces:
Runtime error
Runtime error
Upload 4 files
Browse files- app.py +204 -0
- config.json +15 -0
- requirements.txt +2 -0
- utils.py +264 -0
app.py
ADDED
@@ -0,0 +1,204 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import json
|
2 |
+
import re
|
3 |
+
import sys
|
4 |
+
import time
|
5 |
+
import copy
|
6 |
+
from notion_client import Client
|
7 |
+
|
8 |
+
from specklepy.api.client import SpeckleClient
|
9 |
+
from specklepy.api.credentials import get_default_account, get_local_accounts
|
10 |
+
from specklepy.transports.server import ServerTransport
|
11 |
+
from specklepy.api import operations
|
12 |
+
from specklepy.objects.geometry import Polyline, Point
|
13 |
+
from specklepy.objects import Base
|
14 |
+
import os
|
15 |
+
from functools import wraps
|
16 |
+
|
17 |
+
import gradio as gr
|
18 |
+
import requests
|
19 |
+
from huggingface_hub import webhook_endpoint, WebhookPayload
|
20 |
+
from fastapi import Request
|
21 |
+
import datetime
|
22 |
+
|
23 |
+
from huggingface_hub import WebhooksServer, WebhookPayload
|
24 |
+
import gradio as gr
|
25 |
+
|
26 |
+
|
27 |
+
from utils import *
|
28 |
+
|
29 |
+
# Resolve paths relative to this file so the app works regardless of CWD
current_directory = os.path.dirname(os.path.abspath(__file__))
# Path to the config.json file
config_file_path = os.path.join(current_directory, "config.json")
with open(config_file_path, 'r') as f:
    config = json.load(f)


# Credentials are provided via environment variables (e.g. HF Space secrets)
speckle_token = os.environ.get("SPECKLE_TOKEN")
notion_token = os.environ.get("NOTION_TOKEN")
webhook_secret = os.environ.get("WEBHOOK_SECRET")

# notion login
notion = Client(auth=notion_token)

# speckle login
CLIENT = SpeckleClient(host="https://speckle.xyz/")
CLIENT.authenticate_with_token(token=speckle_token)
|
54 |
+
def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES):
    """Filter a speckle commit and republish it, enriched with Notion metadata.

    Pulls the latest commit of SOURCE_BRANCH, drops data points whose "lu+*"
    (land use) attributes sum to <= 0, strips attributes not listed in the
    Notion attribute table, attaches the metadata JSON blobs, and commits the
    result to TARGET_BRANCH.

    Args:
        STREAM_ID: Speckle stream id.
        SOURCE_BRANCH: Branch name to read from.
        TARGET_BRANCH: Target branch name, or a [search, replace] pair that is
            applied to SOURCE_BRANCH to derive the target branch name.
        UUID_COL: Name of the unique-id attribute on each data point.
        ATTR_METADATA: Notion database id holding the attribute metadata.
        KPI_METADATA: Notion database id holding the KPI metadata.
        DEFAULT_ATTRIBUTES: Attribute names that are always kept.

    Returns:
        The id of the newly created commit.

    Raises:
        KeyError: If the stream has no '@Data'/'@data' -> '@{0}' container.
    """
    # derive target branch name from a [search, replace] pair
    if isinstance(TARGET_BRANCH, list):
        TARGET_BRANCH = SOURCE_BRANCH.replace(TARGET_BRANCH[0], TARGET_BRANCH[1])

    # get data from notion
    databaseFUll_pages = fetch_all_database_pages(notion, ATTR_METADATA)
    kpi_database_pages = fetch_all_database_pages(notion, KPI_METADATA)

    # generate JSON metadata
    attributeMetaData, availableAttributes = notionTable2JSON(databaseFUll_pages, kpi_database_pages)

    # attribute/column/feature names to keep.  Copy the default list so the
    # caller's (config-held) list is not mutated on every webhook run.
    attributesOfInterest = list(DEFAULT_ATTRIBUTES)
    for page in databaseFUll_pages:
        attributesOfInterest.append(get_property_value(page, "name"))
    if UUID_COL not in attributesOfInterest:
        attributesOfInterest.append(UUID_COL)

    # get speckle data: latest commit of the source branch
    stream = getSpeckleStream(STREAM_ID,
                              SOURCE_BRANCH,
                              CLIENT,
                              commit_id="")

    # navigate to the list of speckle objects of interest; the container key
    # casing differs between commits ("@Data" vs "@data"), so remember which
    # one matched and write back to the same key later.
    data_key = None
    for candidate in ("@Data", "@data"):
        try:
            stream_data = stream[candidate]["@{0}"]
            data_key = candidate
            break
        except (KeyError, TypeError, AttributeError):
            continue
    if data_key is None:
        raise KeyError("could not find '@Data'/'@data' -> '@{0}' in the stream; "
                       "check on speckle.com how to access the data")

    # ======== assemble new stream data ============
    streamData_new = []
    log = {"removedDatapoints": 0, "removedID": [], "avg_attrRemoved": 0, "removedAttr": []}
    for obj in stream_data:
        objDict = obj.__dict__

        # REMOVE DATA POINTS ==============================
        # COND A: no land uses at all (all "lu+*" attributes sum to <= 0)
        tot_sqm = 0
        for k, v in objDict.items():
            if k.startswith("lu+"):
                tot_sqm += float(v)
        if tot_sqm <= 0:
            log["removedDatapoints"] += 1
            # .get(): a data point may lack the uuid column entirely
            log["removedID"].append(objDict.get(UUID_COL))
            continue

        # REMOVE ATTRIBUTES ===============================
        datanew = Base()
        for k, v in objDict.items():
            if k in attributesOfInterest:
                datanew[k] = v
            else:
                log["avg_attrRemoved"] += 1
                log["removedAttr"].append(k)

        streamData_new.append(datanew)

    # guard against an empty source commit (would divide by zero)
    if stream_data:
        log["avg_attrRemoved"] = log["avg_attrRemoved"] / len(stream_data)
    log["removedAttr"] = list(set(log["removedAttr"]))

    stream_new = copy.deepcopy(stream)
    stream_new[data_key]["@{0}"] = streamData_new
    # add additional data to stream
    stream_new["logs"] = json.dumps(log)
    stream_new["attributeMetaData"] = json.dumps(attributeMetaData)
    stream_new["availableAttributes"] = json.dumps(availableAttributes)

    # Get transport and send the data object to the speckle stream
    transport = ServerTransport(client=CLIENT, stream_id=STREAM_ID)
    object_id = operations.send(stream_new, [transport])

    # Create a new commit with the new object
    commit_id = CLIENT.commit.create(
        STREAM_ID,
        object_id=object_id,
        message="auto commit, removed datapoints: " + str(log["removedDatapoints"]) + "avg. removed attributes: " + str(log["avg_attrRemoved"]),
        branch_name=TARGET_BRANCH,
    )

    print(commit_id)
    return commit_id
|
155 |
+
|
156 |
+
|
157 |
+
|
158 |
+
@webhook_endpoint
async def update_streams(request: Request):
    """Webhook endpoint: re-run the pipeline when a watched stream changes.

    Validates the incoming speckle webhook payload (event type, stream name,
    branch name against config.json) and, when everything matches, runs
    ``mainFunc`` with the configuration stored for that stream/branch.

    :param request: Incoming FastAPI request carrying the webhook JSON body.
    """
    # Initialize flag
    should_continue = False
    stream_config = None

    # Read the request body as JSON
    payload = await request.json()

    print("============= payload =============")
    print(payload)
    print("============= payload =============")

    payload = payload["payload"]
    # Check if the payload structure matches the expected format
    if "event" in payload and "data" in payload["event"]:
        event_data = payload["event"]["data"]

        # Check if the event type is one of the specified types
        if "event_name" in payload["event"] and payload["event"]["event_name"] in ["commit_create", "commit_delete", "commit_update"]:
            # Check if the stream name matches the configured streams
            if "stream" in payload and payload["stream"]["name"] in config.keys():
                streamName = payload["stream"]["name"]
                # Check if the branch name is present
                if "commit" in event_data and "branchName" in event_data["commit"]:
                    branchName = event_data["commit"]["branchName"]
                    # Attempt to load the configuration for the stream and branch
                    try:
                        stream_config = config[streamName][branchName]
                        should_continue = True
                    except KeyError:
                        print(f"Configuration for stream {streamName} and branch {branchName} not found.")
                else:
                    print("Branch name not found in payload.")
            else:
                print("Stream name not found or not in the specified list.")
        else:
            print("Event type is not one of the specified types.")
    else:
        print("Payload structure does not match the expected format.")

    # If the flag is True, run the main pipeline with the matched config.
    # BUG FIX: the original did ``locals().update(config[streamName, branchName])``
    # which is a tuple dict key (KeyError) AND a no-op (locals() updates do not
    # create variables inside a function) — pass the values explicitly instead.
    if should_continue:
        mainFunc(
            stream_config["STREAM_ID"],
            stream_config["SOURCE_BRANCH"],
            stream_config["TARGET_BRANCH"],
            stream_config["UUID_COL"],
            stream_config["ATTR_METADATA"],
            stream_config["KPI_METADATA"],
            stream_config["DEFAULT_ATTRIBUTES"],
        )
|
config.json
ADDED
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"activeSetting": "2BU_100",
|
3 |
+
"2BU_100":
|
4 |
+
{
|
5 |
+
"STREAM_ID":"ebcfc50abe",
|
6 |
+
"SOURCE_BRANCH": "analysis_results/default+activity_node_analysis",
|
7 |
+
"TARGET_BRANCH": ["analysis_results", "dashboard"],
|
8 |
+
"UUID_COL": "ids",
|
9 |
+
"ATTR_METADATA": "ec2a636f079d4d7686f94901b6238242",
|
10 |
+
"KPI_METADATA": "c5e00b7b51504e5a9326befb9c54671c",
|
11 |
+
"DEFAULT_ATTRIBUTES":["@geometry","@Geometry"]
|
12 |
+
}
|
13 |
+
|
14 |
+
|
15 |
+
}
|
requirements.txt
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
specklepy
|
2 |
+
notion-client
|
utils.py
ADDED
@@ -0,0 +1,264 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
import json
|
3 |
+
import re
|
4 |
+
from specklepy.transports.server import ServerTransport
|
5 |
+
from specklepy.api import operations
|
6 |
+
|
7 |
+
def get_database_properties(database_pages):
    """Return the property (column) names of a Notion database.

    Every page of a database carries the full schema, so inspecting the first
    page is enough (the original looped and broke after one iteration).

    :param database_pages: List of page objects as returned by the Notion API.
    :return: List of property names, or an empty list for an empty database.
    """
    for page in database_pages:
        # one page is sufficient — all rows share the same properties
        return list(page['properties'].keys())
    return []
|
17 |
+
|
18 |
+
# query full database
def fetch_all_database_pages(client, database_id):
    """Fetch every page (row) of a Notion database, following pagination.

    :param client: Initialized Notion client.
    :param database_id: The ID of the Notion database to query.
    :return: A list containing all pages from the database.
    """
    all_pages = []
    cursor = None

    while True:
        response = client.databases.query(
            database_id=database_id,
            start_cursor=cursor,
        )
        all_pages.extend(response['results'])

        # stop once the API reports no further pages
        if not response['has_more']:
            return all_pages
        cursor = response['next_cursor']
|
47 |
+
|
48 |
+
|
49 |
+
def get_property_value(page, property_name):
    """Extract the plain-Python value of one property of a Notion page.

    :param page: The Notion page data as retrieved from the API.
    :param property_name: The name of the property whose value is to be fetched.
    :return: A type-dependent value ('title'/'rich_text' -> str,
        'number' -> number, 'select' -> str or None, 'multi_select' -> list,
        'date' -> str or (start, end) tuple, 'relation'/'people' -> list),
        or ``None`` when the property is missing or its type is unsupported.
    """
    # Missing property -> None (same contract as an unsupported type)
    if property_name not in page['properties']:
        return None

    property_data = page['properties'][property_name]
    prop_type = property_data['type']

    # One extractor per supported Notion property type; each receives the
    # type-keyed payload (property_data[prop_type]).
    extractors = {
        'title': lambda v: ''.join(block['text']['content'] for block in v),
        'rich_text': lambda v: ''.join(block['text']['content'] for block in v),
        'number': lambda v: v,
        'select': lambda v: v['name'] if v else None,
        'multi_select': lambda v: [option['name'] for option in v],
        # a date range becomes a (start, end) tuple, a single date a string
        'date': lambda v: (v['start'], v['end']) if v['end'] else v['start'],
        'relation': lambda v: [relation['id'] for relation in v],
        'people': lambda v: [person['name'] for person in v if 'name' in person],
    }

    extract = extractors.get(prop_type)
    if extract is None:
        # Unsupported property type
        return None
    return extract(property_data[prop_type])
|
99 |
+
|
100 |
+
|
101 |
+
def parse_invalid_json(json_string):
    """Best-effort parse of JSON-ish text typed into Notion cells.

    Normalizes curly ("smart") quotes and single quotes to double quotes and
    adds quotes around bare object keys before parsing.

    :param json_string: The raw cell text, or None / "none" for an empty cell.
    :return: The parsed object, or ``None`` if the input is empty or still
        unparsable after the fixes (the failure is printed, not raised).
    """
    if json_string is None or json_string == "none":
        return None
    # Replace fancy quotes (U+201C/U+201D double, U+2018/U+2019 single) and
    # plain single quotes with standard double quotes
    json_string = re.sub(r"[\u201c\u201d\u2018\u2019]", '"', json_string)
    json_string = json_string.replace("'", '"')

    # Add quotes around any unquoted keys, e.g. {key: 1} -> {"key": 1};
    # the lookarounds skip keys that are already quoted
    json_string = re.sub(r'(?<!")(\b\w+\b)(?!"):', r'"\1":', json_string)

    try:
        # Try parsing the corrected string
        return json.loads(json_string)
    except json.JSONDecodeError as e:
        # Handle parsing error (or re-raise the exception)
        print("JSON parsing error:", e)
        print(json_string)
        return None
|
122 |
+
|
123 |
+
def notionTable2JSON(databaseFUll_pages, kpi_database_pages):
    """Convert the Notion attribute and KPI tables into two JSON-able dicts.

    :param databaseFUll_pages: Pages of the attribute-metadata database.
    :param kpi_database_pages: Pages of the KPI-metadata database.
    :return: ``(attributeMetaData, availableAttributes)`` where
        ``attributeMetaData`` maps attribute name -> metadata dict (including
        its KPI entries) and ``availableAttributes`` is a nested
        level_1/level_2/level_3 hierarchy of attribute names.
    """
    attributeMetaData = {}
    availableAttributes = {}
    cnt = 0
    for page in databaseFUll_pages:
        attributeData = {
            "name": get_property_value(page, "name"),
            "nameShort": get_property_value(page, "nameShort"),
            "nameLong": get_property_value(page, "nameLong"),
            "description": get_property_value(page, "description"),
            "indicator": get_property_value(page, "indicator"),
            "unit": get_property_value(page, "unit"),
            "unitShort": get_property_value(page, "unitShort"),
            "spatialUnit": get_property_value(page, "spatialUnit"),
            "method": get_property_value(page, "method"),
            "type": get_property_value(page, "type"),
            "colorMapping": parse_invalid_json(get_property_value(page, "colorMapping")),
            "parameter": parse_invalid_json(get_property_value(page, "parameter")),
            "level_1": get_property_value(page, "level_1"),
            "level_2": get_property_value(page, "level_2"),
            "level_3": get_property_value(page, "level_3"),

            "dataSet": "ActivityNodes",
            "dataSource": "",

            "KPI": [],
            "visualisation": []
        }
        curAttrName = get_property_value(page, "name")
        print(curAttrName)
        lev1 = get_property_value(page, "level_1")
        lev2 = get_property_value(page, "level_2")
        lev3 = get_property_value(page, "level_3")

        # Build the nested level_1 / level_2 / level_3 hierarchy; an attribute
        # is filed under the deepest level it defines.
        if lev1 is not None and lev1 != "NA":
            if lev1 not in availableAttributes:
                availableAttributes[lev1] = {"sub-levels": {}, "values": []}

            if lev2 is not None and lev2 != "NA":
                if lev2 not in availableAttributes[lev1]["sub-levels"]:
                    availableAttributes[lev1]["sub-levels"][lev2] = {"sub-levels": {}, "values": []}

                if lev3 is not None and lev3 != "NA":
                    if lev3 not in availableAttributes[lev1]["sub-levels"][lev2]["sub-levels"]:
                        availableAttributes[lev1]["sub-levels"][lev2]["sub-levels"][lev3] = {"values": []}
                    availableAttributes[lev1]["sub-levels"][lev2]["sub-levels"][lev3]["values"].append(curAttrName)
                else:
                    availableAttributes[lev1]["sub-levels"][lev2]["values"].append(curAttrName)
            else:
                availableAttributes[lev1]["values"].append(curAttrName)

        # Iterate through the list of KPI reference ids.
        # BUG FIX: the "KPI" property may be absent (get_property_value
        # returns None), which previously raised a TypeError here.
        kpiIDs = get_property_value(page, "KPI") or []
        for kpiID in kpiIDs:
            curKPI = get_page_by_id(kpi_database_pages, kpiID)
            # BUG FIX: a dangling reference previously crashed the whole run
            if curKPI is None:
                print("KPI page not found, skipping:", kpiID)
                continue
            KPI_template = {
                "name": get_property_value(curKPI, "name"),
                "type": get_property_value(curKPI, "type"),
                "unit": get_property_value(curKPI, "unit"),
                "color": parse_invalid_json(get_property_value(curKPI, "color")),
                "nameShort": get_property_value(curKPI, "nameShort"),
                "quality": get_property_value(curKPI, "quality"),
                "args": parse_invalid_json(get_property_value(curKPI, "args")),
                "description": get_property_value(curKPI, "description"),
                "interpretrationHigh": get_property_value(curKPI, "interpretrationHigh"),
                "interpretationLow": get_property_value(curKPI, "interpretationLow"),
            }
            # add KPI data to attributeData
            attributeData["KPI"].append(KPI_template)

        # add to main dictionary
        attributeMetaData[curAttrName] = attributeData
        # BUG FIX: the counter was never incremented (always printed 0)
        cnt += 1

    print("processed pages:", cnt)
    return attributeMetaData, availableAttributes
|
199 |
+
|
200 |
+
|
201 |
+
def get_page_by_id(notion_db_pages, page_id):
    """Return the page whose ``id`` equals *page_id*, or ``None`` if absent."""
    return next((page for page in notion_db_pages if page["id"] == page_id), None)
|
205 |
+
|
206 |
+
def getSpeckleStream(stream_id,
                     branch_name,
                     client,
                     commit_id=""
                     ):
    """
    Retrieves data from a specific branch of a speckle stream.

    Args:
        stream_id (str): The ID of the speckle stream.
        branch_name (str): The name of the branch within the speckle stream.
        client (specklepy.api.client.SpeckleClient): An authenticated speckle client.
        commit_id (str | int): Commit selector — "" fetches the latest commit,
            a string is treated as a commit uuid, an int as an index into the
            branch's recent commits.

    Returns:
        dict: The speckle stream data received from the specified commit.

    Raises:
        TypeError: If ``commit_id`` is neither a string nor an integer
            (previously this left ``commit`` unbound and crashed later with
            a NameError).

    This function retrieves a commit from a specific branch of a speckle stream.
    It prints the branch details and the creation dates of recent commits for
    debugging purposes.
    """

    print("updated A")

    # set stream and branch; ask for 3 commits of history, fall back to 1
    # if the server rejects the larger request
    try:
        branch = client.branch.get(stream_id, branch_name, 3)
        print(branch)
    except Exception:
        branch = client.branch.get(stream_id, branch_name, 1)
        print(branch)

    print("last three commits:")
    for item in branch.commits.items:
        print(item.createdAt)

    if commit_id == "":
        # no selector -> newest commit on the branch
        latest_commit = branch.commits.items[0]
        choosen_commit_id = latest_commit.id
        print("latest commit ", branch.commits.items[0].createdAt, " was choosen")
    elif isinstance(commit_id, str):  # commit uuid
        choosen_commit_id = commit_id
        print("provided commit ", choosen_commit_id, " was choosen")
    elif isinstance(commit_id, int):  # index into the fetched commit history
        latest_commit = branch.commits.items[commit_id]
        choosen_commit_id = latest_commit.id
    else:
        raise TypeError("commit_id must be '' (latest), a commit uuid string, or an int index")

    commit = client.commit.get(stream_id, choosen_commit_id)

    print(commit)
    print(commit.referencedObject)
    # get transport and receive the referenced speckle object
    transport = ServerTransport(client=client, stream_id=stream_id)
    res = operations.receive(commit.referencedObject, transport)

    return res
|