import gradio as gr
import pandas as pd
import numpy as np
import json
from io import StringIO
from collections import OrderedDict
import os
# ---------------------- Accessing data from Notion ---------------------- #
from notion_client import Client as client_notion
from imports_utils import (
    fetch_all_database_pages,
    get_property_value,
    getDataFromNotion,
    notionToken,
)
from config import (
    useNotionData,
    landuseDatabaseId,
    subdomainAttributesDatabaseId,
    landuseColumnName,
    subdomainColumnName,
    sqmPerEmployeeColumnName,
    thresholdsColumnName,
    maxPointsColumnName,
    domainColumnName,
)
if notionToken is None:
    raise Exception("Notion token not found. Please check the environment variables.")
else:
    print("Notion token found successfully!")
if useNotionData:
    notion = client_notion(auth=notionToken)
    lu_mapperDict, subdomain_mapperDict = getDataFromNotion(
        notion=notion,
        notionToken=notionToken,
        landuseDatabaseID=landuseDatabaseId,
        subdomainDatabaseID=subdomainAttributesDatabaseId,
    )
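
# NOTE: the shapes below are an illustration inferred from how the dicts are
# consumed later in this file, not an authoritative schema. lu_mapperDict is
# assumed to map land-use column names to subdomain names, e.g.
# {"kindergarten": "education", ...}, and subdomain_mapperDict to map subdomain
# names to their attributes (domain, sqm per employee, thresholds, max points)
# keyed by the column names imported from config above.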
# ---------------------- Accessing data from Speckle ---------------------- #
from specklepy.api.client import SpeckleClient
from specklepy.api.credentials import get_default_account, get_local_accounts
from specklepy.transports.server import ServerTransport
from specklepy.api import operations
from specklepy.objects.geometry import Polyline, Point
from specklepy.objects import Base
import speckle_utils
import data_utils
from config import streamId, dmBranchName, dmCommitId, luBranchName, luCommitId, distanceMatrixActivityNodes
from config import useSpeckleData
from imports_utils import speckleToken, getDataFromSpeckle
if speckleToken is None:
    raise Exception("Speckle token not found")
else:
    print("Speckle token found successfully!")
if useSpeckleData:
    CLIENT = SpeckleClient(host="https://speckle.xyz/")
    account = get_default_account()
    CLIENT.authenticate_with_token(token=speckleToken)
    landuses, matrices = getDataFromSpeckle(
        speckleClient=CLIENT,
        streamID=streamId,
        matrixBranchName=dmBranchName,
        landuseBranchName=luBranchName,
    )

    # Clean the distance matrix: coerce to numeric first, then cap infinities at
    # 10000 and fill gaps with 0 so the final cast to int cannot fail on NaNs.
    df_dm = matrices[distanceMatrixActivityNodes]
    df_dm_dict = df_dm.to_dict('index')  # snapshot of the raw matrix
    df_dm = df_dm.apply(pd.to_numeric, errors='coerce')
    df_dm = df_dm.replace([np.inf, -np.inf], 10000).fillna(0)
    df_dm = df_dm.round(0).astype(int)

    # Keep only land-use rows for nodes that exist in the distance matrix.
    mask_connected = df_dm.index.tolist()
    df_lu = landuses.loc[mask_connected]

    mergeAssetNonAssetLanduse = True
    if mergeAssetNonAssetLanduse:
        # Drop the 'ASSETS+' prefix so asset and non-asset variants of a land use
        # end up under the same column name.
        df_lu.columns = [col.replace('ASSETS+', '') for col in df_lu.columns]

    # Apply the same numeric cleanup, then sum columns that now share a name.
    df_lu = df_lu.apply(pd.to_numeric, errors='coerce')
    df_lu = df_lu.replace([np.inf, -np.inf], 10000).fillna(0)
    df_lu = df_lu.astype(int)
    df_lu = df_lu.T.groupby(level=0).sum().T
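
    # Illustration (hypothetical column names): with mergeAssetNonAssetLanduse
    # on, columns such as 'ASSETS+school' and 'school' lose the prefix, collide
    # on the same name, and the groupby(level=0).sum() above folds them into a
    # single 'school' column holding the combined areas.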

def test(input_json):
    print("Received input")
    # Parse the input JSON string; fall back to converting single quotes for
    # clients that send Python-style dicts instead of strict JSON.
    try:
        inputs = json.loads(input_json)
    except json.JSONDecodeError:
        inputs = json.loads(input_json.replace("'", '"'))
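    # Example: a Grasshopper client may post "{'input': {...}}" with single
    # quotes; the fallback above rewrites it to valid JSON. Note that this
    # simple replace would corrupt string values that contain apostrophes.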
    # ------------------------- Accessing input data from Grasshopper ------------------------- #
    from imports_utils import (
        getDataFromGrasshopper,
        findUniqueDomains,
        findUniqueSubdomains,
        landusesToSubdomains,
        FindWorkplacesNumber,
        computeAccessibility,
        computeAccessibility_pointOfInterest,
        remap,
        accessibilityToLivability,
    )
    from config import alpha as alphaDefault
    from config import threshold as thresholdDefault
    useGrasshopperData = inputs['input']["useGrasshopperData"]  # fetch Grasshopper data or not; arrives as a string
    if useGrasshopperData == "True":
        # Matrix, land uses and (optionally) the mapper dicts all come from the request.
        dfMatrix_gh, dfLanduses_gh, attributeMapperDict_gh, landuseMapperDict_gh, alpha, threshold = getDataFromGrasshopper(
            inputJson=inputs,
            inputNameMatrix="matrix",
            inputNameLanduse="landuse_areas",
            inputNameAttributeMapper="attributeMapperDict",
            inputNameLanduseMapper="landuseMapperDict",
            inputNameAlpha="alpha",
            inputNameThreshold="threshold",
        )
        dfMatrix = dfMatrix_gh
        dfLanduses = dfLanduses_gh
        if not useNotionData:
            livabilityMapperDict = attributeMapperDict_gh
            landuseMapperDict = landuseMapperDict_gh
        else:
            # Notion remains the source of truth for both mappers.
            landuseMapperDict = lu_mapperDict
            livabilityMapperDict = subdomain_mapperDict
    else:
        # Only alpha and threshold are read from the request; data comes from Speckle and Notion.
        dfMatrix_gh, dfLanduses_gh, attributeMapperDict_gh, landuseMapperDict_gh, alpha, threshold = getDataFromGrasshopper(
            inputJson=inputs,
            inputNameMatrix=None,
            inputNameLanduse=None,
            inputNameAttributeMapper=None,
            inputNameLanduseMapper=None,
            inputNameAlpha="alpha",
            inputNameThreshold="threshold",
        )
        dfLanduses = df_lu.copy()
        dfMatrix = df_dm.copy()
        landuseMapperDict = lu_mapperDict
        livabilityMapperDict = subdomain_mapperDict
    # ---------------------- Livability computation ---------------------- #
    domainsUnique = findUniqueDomains(livabilityMapperDict)
    subdomainsUnique = findUniqueSubdomains(landuseMapperDict)

    LivabilitySubdomainsWeights = landusesToSubdomains(dfMatrix, dfLanduses, landuseMapperDict, subdomainsUnique)
    WorkplacesNumber = FindWorkplacesNumber(dfMatrix, livabilityMapperDict, LivabilitySubdomainsWeights, subdomainsUnique)

    # Prepare an input weights dataframe for the parameter LivabilitySubdomainsInputs
    LivabilitySubdomainsInputs = pd.concat([LivabilitySubdomainsWeights, WorkplacesNumber], axis=1)

    subdomainsAccessibility = computeAccessibility(dfMatrix, LivabilitySubdomainsInputs, alpha, threshold)
    livability = accessibilityToLivability(dfMatrix, subdomainsAccessibility, livabilityMapperDict, domainsUnique)

    # Serialize the result frames for the JSON response.
    livability_dictionary = livability.to_dict('index')
    LivabilitySubdomainsInputs_dictionary = LivabilitySubdomainsInputs.to_dict('index')
    subdomainsAccessibility_dictionary = subdomainsAccessibility.to_dict('index')
    LivabilitySubdomainsWeights_dictionary = LivabilitySubdomainsWeights.to_dict('index')
    df_lu_dict = dfLanduses.to_dict('index')
    dm_dictionary = dfMatrix.to_dict('index')
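
    # Assumed shapes (not verified against imports_utils): subdomainsAccessibility
    # is indexed by the connected activity nodes with one column per subdomain,
    # and livability aggregates those columns into one column per domain.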
    # Prepare the output
    output = {
        "subdomainsAccessibility_dictionary": subdomainsAccessibility_dictionary,
        "livability_dictionary": livability_dictionary,
        "subdomainsWeights_dictionary": LivabilitySubdomainsInputs_dictionary,
        "luDomainMapper": landuseMapperDict,
        "attributeMapper": livabilityMapperDict,
        "mask_connected": mask_connected,
        "dm_an": dm_dictionary,
        "landuses": df_lu_dict,
        "constants": [alpha, threshold],
    }
    return json.dumps(output)
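
# A hypothetical request body for the endpoint above (field names taken from
# the getDataFromGrasshopper calls; the values are made up for illustration):
#
# {
#     "input": {
#         "useGrasshopperData": "False",
#         "alpha": 0.0038,
#         "threshold": 600
#     }
# }
#
# With "useGrasshopperData": "True", the "input" object would additionally
# carry "matrix", "landuse_areas", "attributeMapperDict" and "landuseMapperDict".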
# Define the Gradio interface with a single JSON input
iface = gr.Interface(
    fn=test,
    inputs=gr.Textbox(label="Input JSON", lines=20, placeholder="Enter JSON with all parameters here..."),
    outputs=gr.JSON(label="Output JSON"),
    title="testspace",
)

iface.launch()
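
# A minimal client-side sketch, commented out so it never runs inside the app.
# It assumes the Space is reachable at <SPACE_URL>; gradio_client ships with gradio.
#
# from gradio_client import Client
# client = Client("<SPACE_URL>")
# result = client.predict(
#     '{"input": {"useGrasshopperData": "False", "alpha": 0.0038, "threshold": 600}}',
#     api_name="/predict",
# )
# print(result)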