nastasiasnk
committed on
Update imports_utils.py
imports_utils.py +48 -28
imports_utils.py
CHANGED
@@ -174,36 +174,56 @@ def fetchSubdomainMapper (livabilityAttributePages):
 
 
 
-def fetchDistanceMatrices (
-    nested_list = []
-
-    # navigate to list with speckle objects of interest
-    distance_matrices = {}
-    for distM in stream_distance_matrices["@Data"]['@{0}']:
-        for kk in distM.__dict__.keys():
-            try:
-                if kk.split("+")[1].startswith("distance_matrix"):
-                    distance_matrix_dict = json.loads(distM[kk])
-                    origin_ids = distance_matrix_dict["origin_uuid"]
-                    destination_ids = distance_matrix_dict["destination_uuid"]
-                    distance_matrix = distance_matrix_dict["matrix"]
-                    # Convert the distance matrix to a DataFrame
-                    chunks = distance_matrix_dict["chunks"]
-                    for chunk in chunks:
-                        for row_obj in chunk['@rows']:
-                            row = row_obj['@row']
-                            nested_list.append(row)
-
-                    df_distances = pd.DataFrame(distance_matrix, index=origin_ids, columns=destination_ids)
-
-                    # i want to add the index & colum names to dist_m_csv
-                    #distance_matrices[kk] = dist_m_csv[kk]
-                    distance_matrices[kk] = df_distances
-            except:
-                pass
-
+def fetchDistanceMatrices (streamObj):
+
+    matrices = {}
+    isDict = False
+
+    try:
+        data_part = streamObj["@Data"]["@{0}"]
+        for matrix in data_part:
+            # Find the matrix name
+            matrix_name = next((attr for attr in dir(matrix) if "matrix" in attr), None)
+
+            if not matrix_name:
+                continue
+
+            matrix_data = getattr(matrix, matrix_name)
+            originUUID = matrix_data["@originUUID"]
+            destinationUUID = matrix_data["@destinationUUID"]
+
+            processed_rows = []
+            for chunk in matrix_data["@chunks"]:
+                for row in chunk["@rows"]:
+                    processed_rows.append(row["@row"])
+
+            matrix_array = np.array(processed_rows)
+            matrix_df = pd.DataFrame(matrix_array, index=originUUID, columns=destinationUUID)
+            matrices[matrix_name] = matrix_df
+    except KeyError:
+        data_part = streamObj["@Data"].__dict__
+        print(data_part.keys())
+
+        for k, v in data_part.items():
+            if "matrix" in k:
+                matrix_name = k
+                matrix_data = v
+                originUUID = matrix_data["@originUUID"]
+                destinationUUID = matrix_data["@destinationUUID"]
+
+                processed_rows = []
+                for chunk in matrix_data["@chunks"]:
+                    for row in chunk["@rows"]:
+                        processed_rows.append(row["@row"])
+
+                matrix_array = np.array(processed_rows)
+                matrix_df = pd.DataFrame(matrix_array, index=originUUID, columns=destinationUUID)
+                matrices[matrix_name] = matrix_df
+
+    return matrices
+
 
 """
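
For context, a minimal sketch (not part of the commit) of the chunked payload shape the new fetchDistanceMatrices flattens. The "@originUUID" / "@destinationUUID" / "@chunks" / "@rows" / "@row" keys mirror the diff above; the sample IDs and distance values are invented.

import numpy as np
import pandas as pd

# Hypothetical matrix payload shaped like the Speckle data the new code reads.
matrix_data = {
    "@originUUID": ["o1", "o2"],
    "@destinationUUID": ["d1", "d2", "d3"],
    "@chunks": [
        {"@rows": [{"@row": [0.0, 12.5, 30.1]}]},  # first chunk of rows
        {"@rows": [{"@row": [12.5, 0.0, 18.4]}]},  # second chunk of rows
    ],
}

# Same flattening as the added code: collect every "@row" across all chunks.
processed_rows = [row["@row"]
                  for chunk in matrix_data["@chunks"]
                  for row in chunk["@rows"]]

# Origins label the rows, destinations label the columns.
matrix_df = pd.DataFrame(np.array(processed_rows),
                         index=matrix_data["@originUUID"],
                         columns=matrix_data["@destinationUUID"])
print(matrix_df)  # 2x3 origin-by-destination distance matrix

The try branch covers streams where "@Data" exposes the object list under "@{0}"; the except KeyError fallback walks "@Data".__dict__ instead, so both dict-style and attribute-style stream objects produce the same matrices dict keyed by matrix name.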