nastasiasnk committed on
Commit
cb65773
·
verified ·
1 Parent(s): be8fbbb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -54
app.py CHANGED
@@ -16,9 +16,9 @@ from notion_client import Client as client_notion
16
 
17
  notionToken = os.getenv('notionToken')
18
  if notionToken is None:
19
- raise Exception("Secret token not found. Please check the environment variables.")
20
  else:
21
- print("Secret token found successfully!")
22
 
23
  from config import landuseDatabaseId , subdomainAttributesDatabaseId
24
  from imports_utils import fetch_all_database_pages
@@ -28,6 +28,7 @@ from imports_utils import notion
28
  landuse_attributes = fetch_all_database_pages(notion, landuseDatabaseId)
29
  livability_attributes = fetch_all_database_pages(notion, subdomainAttributesDatabaseId)
30
 
 
31
  # fetch the dictionary with landuse - domain pairs
32
  landuseMapperDict ={}
33
  subdomains_unique = []
@@ -40,7 +41,6 @@ for page in landuse_attributes:
40
  if value_subdomain != "":
41
  subdomains_unique.append(value_subdomain)
42
 
43
- #subdomains_unique = list(set(subdomains_unique))
44
 
45
 
46
  # fetch the dictionary with subdomain attribute data
@@ -55,7 +55,7 @@ for page in livability_attributes:
55
  domain = get_property_value(page, "DOMAIN")
56
  if thresholds:
57
  attributeMapperDict[subdomain] = {
58
- 'sqmPerEmpl': [sqm_per_employee if sqm_per_employee != "" else 0],
59
  'thresholds': thresholds,
60
  'max_points': max_points,
61
  'domain': [domain if domain != "" else 0]
@@ -63,29 +63,12 @@ for page in livability_attributes:
63
  if domain != "":
64
  domains_unique.append(domain)
65
 
66
- #domains_unique = list(set(domains_unique))
67
 
68
 
69
 
70
  # ---------------------- Accessing data from Speckle ---------------------- #
71
 
72
 
73
- """
74
- from specklepy.api.client import Client as SpeckleClient
75
- from specklepy.api.credentials import get_default_account
76
-
77
- # Example usage
78
-
79
- client = Client(host="your_speckle_server_host")
80
- account = get_default_account()
81
- client.authenticate(token=account.token)
82
-
83
-
84
- CLIENT = SpeckleClient(host="https://speckle.xyz/")
85
- CLIENT.authenticate_with_token(token=userdata.get('speckleToken'))
86
- """
87
-
88
- #import other libaries
89
  from specklepy.api.client import SpeckleClient
90
  from specklepy.api.credentials import get_default_account, get_local_accounts
91
  from specklepy.transports.server import ServerTransport
@@ -93,53 +76,27 @@ from specklepy.api import operations
93
  from specklepy.objects.geometry import Polyline, Point
94
  from specklepy.objects import Base
95
 
96
-
97
-
98
-
99
  import imports_utils
100
-
101
  import speckle_utils
102
  import data_utils
103
 
104
  from config import landuseDatabaseId , streamId, branch_name_dm, commit_id_dm
105
- #from imports_utils import streamMatrices
106
  from imports_utils import speckleToken
107
  from imports_utils import fetchDistanceMatrices
108
  from config import distanceMatrixActivityNodes
109
  from config import distanceMatrixTransportStops
110
 
111
-
112
-
113
-
114
  CLIENT = SpeckleClient(host="https://speckle.xyz/")
115
  account = get_default_account()
116
  CLIENT.authenticate_with_token(token=speckleToken)
117
 
118
-
119
  streamDistanceMatrices = speckle_utils.getSpeckleStream(streamId,branch_name_dm,CLIENT, commit_id_dm)
120
  matrices = fetchDistanceMatrices (streamDistanceMatrices)
121
  df_dm = matrices[distanceMatrixActivityNodes]
122
  df_dm_transport = matrices[distanceMatrixTransportStops]
123
 
124
-
125
  dm_dictionary = df_dm.to_dict('index')
126
-
127
- """
128
- import logging
129
-
130
- # Set up basic configuration for logging
131
- logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
132
-
133
- # Log detailed debug information at critical steps or just before a line where you suspect the error might occur
134
- logging.debug('About to start a potentially problematic operation...')
135
- try:
136
- # Potentially problematic code
137
-
138
-
139
- except Exception as e:
140
- logging.error(f"An error occurred: {e}")
141
- raise
142
- """
143
 
144
 
145
 
@@ -158,6 +115,7 @@ def test(input_json):
158
  matrix = inputs['input']["matrix"]
159
  landuses = inputs['input']["landuse_areas"]
160
  transport_matrix = inputs['input']["transportMatrix"]
 
161
  #attributeMapperDict = inputs['input']["attributeMapperDict"]
162
  #landuseMapperDict = inputs['input']["landuseMapperDict"]
163
 
@@ -178,12 +136,10 @@ def test(input_json):
178
  tranportModes = ["DRT", "GMT", "HSR"]
179
 
180
  def split_dict_by_subkey(original_dict, substrings):
181
- # Initialize dictionaries for each substring
182
  result_dicts = {substring: {} for substring in substrings}
183
 
184
  for key, nested_dict in original_dict.items():
185
  for subkey, value in nested_dict.items():
186
- # Check each substring if it's in the subkey
187
  for substring in substrings:
188
  if substring in subkey:
189
  if key not in result_dicts[substring]:
@@ -198,10 +154,8 @@ def test(input_json):
198
  art_dict = result_dicts["DRT"]
199
  gmt_dict = result_dicts["GMT"]
200
 
201
-
202
  df_art_matrix = pd.DataFrame(art_dict).T
203
  df_art_matrix = df_art_matrix.round(0).astype(int)
204
-
205
  df_gmt_matrix = pd.DataFrame(gmt_dict).T
206
  df_gmt_matrix = df_art_matrix.round(0).astype(int)
207
 
@@ -277,7 +231,7 @@ def test(input_json):
277
 
278
  for subdomain in UniqueSubdomainsList:
279
  for key, value_list in SubdomainAttributeDict.items():
280
- sqm_per_empl = float(SubdomainAttributeDict[subdomain]['sqmPerEmpl'][0])
281
  if key in destinationWeights.columns and key == subdomain:
282
  if sqm_per_empl > 0:
283
  df_LivabilitySubdomainsWorkplaces['jobs'] += (round(destinationWeights[key] / sqm_per_empl,2)).fillna(0)
@@ -393,7 +347,8 @@ def test(input_json):
393
  "subdomainsWeights_dictionary": LivabilitySubdomainsInputs_dictionary,
394
  "luDomainMapper": landuseMapperDict,
395
  "attributeMapper": attributeMapperDict,
396
- "fetchDm": dm_dictionary
 
397
  }
398
 
399
 
 
16
 
17
  notionToken = os.getenv('notionToken')
18
  if notionToken is None:
19
+ raise Exception("Notion token not found. Please check the environment variables.")
20
  else:
21
+ print("Notion token found successfully!")
22
 
23
  from config import landuseDatabaseId , subdomainAttributesDatabaseId
24
  from imports_utils import fetch_all_database_pages
 
28
  landuse_attributes = fetch_all_database_pages(notion, landuseDatabaseId)
29
  livability_attributes = fetch_all_database_pages(notion, subdomainAttributesDatabaseId)
30
 
31
+
32
  # fetch the dictionary with landuse - domain pairs
33
  landuseMapperDict ={}
34
  subdomains_unique = []
 
41
  if value_subdomain != "":
42
  subdomains_unique.append(value_subdomain)
43
 
 
44
 
45
 
46
  # fetch the dictionary with subdomain attribute data
 
55
  domain = get_property_value(page, "DOMAIN")
56
  if thresholds:
57
  attributeMapperDict[subdomain] = {
58
+ 'sqmPerEmpl': sqm_per_employee if sqm_per_employee != "" else 0,
59
  'thresholds': thresholds,
60
  'max_points': max_points,
61
  'domain': [domain if domain != "" else 0]
 
63
  if domain != "":
64
  domains_unique.append(domain)
65
 
 
66
 
67
 
68
 
69
  # ---------------------- Accessing data from Speckle ---------------------- #
70
 
71
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
72
  from specklepy.api.client import SpeckleClient
73
  from specklepy.api.credentials import get_default_account, get_local_accounts
74
  from specklepy.transports.server import ServerTransport
 
76
  from specklepy.objects.geometry import Polyline, Point
77
  from specklepy.objects import Base
78
 
 
 
 
79
  import imports_utils
 
80
  import speckle_utils
81
  import data_utils
82
 
83
  from config import landuseDatabaseId , streamId, branch_name_dm, commit_id_dm
 
84
  from imports_utils import speckleToken
85
  from imports_utils import fetchDistanceMatrices
86
  from config import distanceMatrixActivityNodes
87
  from config import distanceMatrixTransportStops
88
 
 
 
 
89
  CLIENT = SpeckleClient(host="https://speckle.xyz/")
90
  account = get_default_account()
91
  CLIENT.authenticate_with_token(token=speckleToken)
92
 
 
93
  streamDistanceMatrices = speckle_utils.getSpeckleStream(streamId,branch_name_dm,CLIENT, commit_id_dm)
94
  matrices = fetchDistanceMatrices (streamDistanceMatrices)
95
  df_dm = matrices[distanceMatrixActivityNodes]
96
  df_dm_transport = matrices[distanceMatrixTransportStops]
97
 
 
98
  dm_dictionary = df_dm.to_dict('index')
99
+ df_dm_transport_dictionary = df_dm_transport.to_dict('index')
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
100
 
101
 
102
 
 
115
  matrix = inputs['input']["matrix"]
116
  landuses = inputs['input']["landuse_areas"]
117
  transport_matrix = inputs['input']["transportMatrix"]
118
+
119
  #attributeMapperDict = inputs['input']["attributeMapperDict"]
120
  #landuseMapperDict = inputs['input']["landuseMapperDict"]
121
 
 
136
  tranportModes = ["DRT", "GMT", "HSR"]
137
 
138
  def split_dict_by_subkey(original_dict, substrings):
 
139
  result_dicts = {substring: {} for substring in substrings}
140
 
141
  for key, nested_dict in original_dict.items():
142
  for subkey, value in nested_dict.items():
 
143
  for substring in substrings:
144
  if substring in subkey:
145
  if key not in result_dicts[substring]:
 
154
  art_dict = result_dicts["DRT"]
155
  gmt_dict = result_dicts["GMT"]
156
 
 
157
  df_art_matrix = pd.DataFrame(art_dict).T
158
  df_art_matrix = df_art_matrix.round(0).astype(int)
 
159
  df_gmt_matrix = pd.DataFrame(gmt_dict).T
160
  df_gmt_matrix = df_art_matrix.round(0).astype(int)
161
 
 
231
 
232
  for subdomain in UniqueSubdomainsList:
233
  for key, value_list in SubdomainAttributeDict.items():
234
+ sqm_per_empl = float(SubdomainAttributeDict[subdomain]['sqmPerEmpl']) #[0]
235
  if key in destinationWeights.columns and key == subdomain:
236
  if sqm_per_empl > 0:
237
  df_LivabilitySubdomainsWorkplaces['jobs'] += (round(destinationWeights[key] / sqm_per_empl,2)).fillna(0)
 
347
  "subdomainsWeights_dictionary": LivabilitySubdomainsInputs_dictionary,
348
  "luDomainMapper": landuseMapperDict,
349
  "attributeMapper": attributeMapperDict,
350
+ "fetchDm": dm_dictionary,
351
+ "transportDm":df_dm_transport_dictionary
352
  }
353
 
354