Delete pages
- pages/1_Wind.py +0 -204
- pages/2_Temperature.py +0 -198
- pages/3_Humidity.py +0 -200
- pages/4_Smart Lampposts.py +0 -177
- pages/5_Past Records.py +0 -141
- pages/6_Heat island.py +0 -183
- pages/7_CoWIN.py +0 -172
pages/1_Wind.py
DELETED
@@ -1,204 +0,0 @@
import streamlit as st
import requests
import folium
from streamlit_folium import st_folium
import pandas as pd
import plotly.graph_objs as go
import branca.colormap as cm
import pytz
from datetime import datetime

# Set page layout to wide
st.set_page_config(layout="wide", page_title="Real-Time Wind Data Dashboard")

@st.cache_data(ttl=300)
def fetch_geojson_data(url):
    response = requests.get(url)
    data = response.json()
    hk_tz = pytz.timezone('Asia/Hong_Kong')
    fetch_time = datetime.now(hk_tz).strftime('%Y-%m-%dT%H:%M:%S')
    return data, fetch_time

# Function to calculate wind statistics
def calculate_wind_stats(features):
    gust_speeds = [feature['properties']['10-Minute Maximum Gust(km/hour)'] for feature in features if
                   feature['properties']['10-Minute Maximum Gust(km/hour)'] is not None]
    mean_speeds = [feature['properties']['10-Minute Mean Speed(km/hour)'] for feature in features if
                   feature['properties']['10-Minute Mean Speed(km/hour)'] is not None]

    if not gust_speeds:
        return None, None, None, None
    avg_gust = sum(gust_speeds) / len(gust_speeds)
    min_gust = min(gust_speeds)
    max_gust = max(gust_speeds)
    avg_mean_speed = sum(mean_speeds) / len(mean_speeds) if mean_speeds else None
    return avg_gust, min_gust, max_gust, avg_mean_speed

# Function to convert wind direction to degrees
def mean_wind_direction_to_degrees(direction):
    directions = {
        'North': 0, 'Northeast': 45, 'East': 90, 'Southeast': 135,
        'South': 180, 'Southwest': 225, 'West': 270, 'Northwest': 315
    }
    return directions.get(direction, 0)

# Fetch GeoJSON data
url = 'https://csdi.vercel.app/weather/wind'
geo_data, fetch_time = fetch_geojson_data(url)

# Calculate wind statistics
avg_gust, min_gust, max_gust, avg_mean_speed = calculate_wind_stats(geo_data['features'])

# Create a map centered on a specific location
map_center = [22.35473034278638, 114.14827142452518]  # Coordinates of Hong Kong
my_map = folium.Map(location=map_center, zoom_start=10.35, tiles='CartoDB positron')

# Create a colormap for wind speed with limited width
colormap = cm.LinearColormap(colors=['#000000', '#0066eb', '#ff3d77', '#eb0000'],
                             vmin=0, vmax=85)
my_map.add_child(colormap)

# Function to calculate arrow size based on wind speed
def get_arrow_size(speed):
    if speed is None:
        return 20
    return max(20, min(50, speed * 2))

# Add the GeoJSON data to the map with arrow markers
for feature in geo_data['features']:
    coordinates = feature['geometry']['coordinates']
    mean_wind_direction = feature['properties']['10-Minute Mean Wind Direction(Compass points)']
    mean_speed = feature['properties']['10-Minute Mean Speed(km/hour)']

    # Skip plotting if wind direction is null
    if mean_wind_direction is None:
        continue

    # Calculate rotation angle for wind direction
    rotation_angle = mean_wind_direction_to_degrees(mean_wind_direction)

    # Calculate arrow size based on wind speed
    arrow_size = get_arrow_size(mean_speed)

    # Determine color based on wind speed
    color = colormap(mean_speed) if mean_speed is not None else 'gray'

    # Create an arrow marker for wind direction
    folium.Marker(
        location=[coordinates[1], coordinates[0]],
        icon=folium.DivIcon(html=f"""
            <div style="
                width: {arrow_size}px; height: {arrow_size}px;
                display: flex; align-items: center; justify-content: center;
                transform: rotate({rotation_angle}deg);
            ">
                <svg width="{arrow_size}" height="{arrow_size}" viewBox="0 0 24 24" fill="none" stroke="{color}" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
                    <line x1="12" y1="5" x2="12" y2="19"></line>
                    <polyline points="5 12 12 5 19 12"></polyline>
                </svg>
            </div>
        """),
        popup=folium.Popup(f"""
            <b>{feature['properties']['Automatic Weather Station']}</b><br>
            Direction: {mean_wind_direction}<br>
            Speed: {mean_speed} km/h<br>
            Max Gust: {feature['properties']['10-Minute Maximum Gust(km/hour)']} km/h
        """, max_width=300)
    ).add_to(my_map)

col1, col2, col3 = st.columns([1.65, 2, 1.15])

with col1:
    if geo_data['features']:
        wind_directions = [feature['properties']['10-Minute Mean Wind Direction(Compass points)'] for feature in
                           geo_data['features']]
        direction_counts = {d: wind_directions.count(d) for d in
                            ['North', 'Northeast', 'East', 'Southeast', 'South', 'Southwest', 'West', 'Northwest']}

        # Prepare wind speeds for each direction
        direction_speeds = {d: [] for d in
                            ['North', 'Northeast', 'East', 'Southeast', 'South', 'Southwest', 'West', 'Northwest']}
        for feature in geo_data['features']:
            direction = feature['properties']['10-Minute Mean Wind Direction(Compass points)']
            speed = feature['properties']['10-Minute Mean Speed(km/hour)']
            if direction in direction_speeds and speed is not None:
                direction_speeds[direction].append(speed)

        # Calculate average wind speed for each direction
        average_speeds = {d: sum(speeds) / len(speeds) if speeds else 0 for d, speeds in direction_speeds.items()}

        # Plot wind direction rose with average wind speed
        fig = go.Figure()

        # Add polar bar for wind direction
        fig.add_trace(go.Barpolar(
            r=[direction_counts[d] for d in direction_counts.keys()],
            theta=list(direction_counts.keys()),
            name='Wind Direction Count',
            marker_color='#0008ff',
            opacity=0.5
        ))

        # Add radial bar for average wind speed
        fig.add_trace(go.Barpolar(
            r=list(average_speeds.values()),
            theta=list(average_speeds.keys()),
            name='Average Wind Speed',
            marker_color='#ff0019',  # Orange color for wind speed
            opacity=0.5,
            thetaunit='radians',  # Ensures radial bars are correctly positioned
            base=0  # Base of the radial bars starts from 0
        ))

        fig.update_layout(
            polar=dict(
                radialaxis=dict(
                    visible=False,
                    range=[0, max(direction_counts.values())]
                ),
                angularaxis=dict(
                    tickvals=list(direction_counts.keys()),
                    ticktext=['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW'],
                    rotation=90,  # Rotate to make North the top
                    direction='clockwise'
                )
            ),
            width=500,
            height=380,
            title={'text': 'Wind Direction and Average Speed Rose Plot', 'font': {'size': 18}},
            legend={'x': 0.8, 'y': 0.95}
        )

        st.plotly_chart(fig, use_container_width=True)

        st.caption(f"Data fetched at: {fetch_time}")

        if avg_gust is not None:
            col1a, col1b = st.columns(2)
            with col1a:
                st.metric(label="Avg Max Gust (km/h)", value=f"{avg_gust:.2f}")
                st.metric(label="Min Max Gust (km/h)", value=f"{min_gust}")
            with col1b:
                st.metric(label="Max Max Gust (km/h)", value=f"{max_gust}")
                if avg_mean_speed is not None:
                    st.metric(label="Avg Mean Speed (km/h)", value=f"{avg_mean_speed:.2f}")
        else:
            st.write("No valid wind data available to calculate statistics.")

gust_speeds = [feature['properties']['10-Minute Maximum Gust(km/hour)'] for feature in geo_data['features'] if
               feature['properties']['10-Minute Maximum Gust(km/hour)'] is not None]


with col3:
    table_data = [{
        'Weather Station': feature['properties']['Automatic Weather Station'],
        'Mean Wind Direction': feature['properties']['10-Minute Mean Wind Direction(Compass points)'],
        'Mean Speed(km/hour)': feature['properties']['10-Minute Mean Speed(km/hour)'],
        'Maximum Gust(km/hour)': feature['properties']['10-Minute Maximum Gust(km/hour)']
    } for feature in geo_data['features']]

    st.dataframe(pd.DataFrame(table_data), height=600)

with col2:
    # Display map
    st_folium(my_map, width=500, height=600)
pages/2_Temperature.py
DELETED
@@ -1,198 +0,0 @@
import streamlit as st
import requests
import json
import pandas as pd
import folium
from streamlit_folium import st_folium
import plotly.graph_objects as go
import numpy as np
from datetime import datetime, timezone
import time
import pytz

# Set page layout to wide
st.set_page_config(layout="wide", page_title="Real-Time Temperature Data Dashboard")

# Function to fetch JSON data with caching and expiration
@st.cache_data(ttl=300)  # Cache data for 5 minutes (300 seconds)
def fetch_data():
    url = 'https://csdi.vercel.app/weather/temp'
    response = requests.get(url)
    hk_tz = pytz.timezone('Asia/Hong_Kong')
    fetch_time = datetime.now(hk_tz).strftime('%Y-%m-%dT%H:%M:%S')
    return json.loads(response.text), fetch_time

# Fetch the JSON data
data, fetch_time = fetch_data()

# Create a Pandas DataFrame from the JSON data
features = data['features']
df = pd.json_normalize(features)

# Rename columns for easier access
df.rename(columns={
    'properties.Automatic Weather Station': 'Station',
    'properties.Air Temperature(degree Celsius)': 'Temperature',
    'geometry.coordinates': 'Coordinates'
}, inplace=True)

# Split Coordinates into separate Longitude and Latitude columns
df[['Longitude', 'Latitude']] = pd.DataFrame(df['Coordinates'].tolist(), index=df.index)

# Extract temperature data
temps = df['Temperature'].dropna().tolist()

# Create three columns
col1, col2, col3 = st.columns([1.65, 2, 1.15])

# Column 1: Histogram and statistics with two-sigma analysis
with col1:
    # Row 1: Histogram
    with st.container():
        # Convert list to pandas Series
        temps_series = pd.Series(temps)

        # Calculate histogram data
        hist_data = np.histogram(temps_series, bins=10)
        bin_edges = hist_data[1]
        counts = hist_data[0]


        # Create a color gradient from blue to red
        def get_color(value, min_value, max_value):
            ratio = (value - min_value) / (max_value - min_value)
            r = int(255 * ratio)  # Red component
            b = int(255 * (1 - ratio))  # Blue component
            return f'rgb({r}, 0, {b})'


        # Create histogram with Plotly Graph Objects
        fig = go.Figure()

        # Add histogram bars with gradient colors
        for i in range(len(bin_edges) - 1):
            bin_center = (bin_edges[i] + bin_edges[i + 1]) / 2
            color = get_color(bin_center, bin_edges.min(), bin_edges.max())
            fig.add_trace(go.Bar(
                x=[f'{bin_edges[i]:.1f} - {bin_edges[i + 1]:.1f}'],
                y=[counts[i]],
                marker_color=color,
                name=f'{bin_edges[i]:.1f} - {bin_edges[i + 1]:.1f}'
            ))

        # Customize layout
        fig.update_layout(
            xaxis_title='Temperature (°C)',
            yaxis_title='Count',
            title='Temperature Distribution',
            bargap=0.2,  # Adjust gap between bars
            title_font_size=20,
            xaxis_title_font_size=14,
            yaxis_title_font_size=14,
            height=350,  # Set plot height
            xaxis=dict(title_font_size=14),
            yaxis=dict(title_font_size=14)
        )

        # Display the plot in Streamlit
        st.plotly_chart(fig, use_container_width=True)
        st.caption(f"Data fetched at: {fetch_time}")

    # Row 2: Statistics
    with st.container():
        col_1, col_2 = st.columns([1, 1])
        with col_1:
            if temps:
                avg_temp = np.mean(temps)
                std_temp = np.std(temps)
                max_temp = np.max(temps)
                min_temp = np.min(temps)

                two_sigma_range = (avg_temp - 2 * std_temp, avg_temp + 2 * std_temp)

                st.metric(label="Average Temperature (°C)", value=f"{avg_temp:.2f}")
                st.metric(label="Minimum Temperature (°C)", value=f"{min_temp:.2f}")
        with col_2:
            st.metric(label="Maximum Temperature (°C)", value=f"{max_temp:.2f}")
            st.metric(label="Std. Dev (°C)", value=f"{std_temp:.2f}")


# Column 2: Map
def temperature_to_color(temp, min_temp, max_temp):
    """Convert temperature to a color based on the gradient from blue (low) to red (high)."""
    norm_temp = (temp - min_temp) / (max_temp - min_temp)
    red = int(255 * norm_temp)
    blue = int(255 * (1 - norm_temp))
    return f'rgb({red}, 0, {blue})'

with col2:
    # Create the base map
    m = folium.Map(location=[22.3547, 114.1483], zoom_start=11, tiles='CartoDB positron')

    # Determine min and max temperatures for color scaling
    min_temp = df['Temperature'].min()
    max_temp = df['Temperature'].max()

    # Create a color scale legend
    colormap = folium.LinearColormap(
        colors=['blue', 'white', 'red'],
        index=[min_temp, (min_temp + max_temp) / 2, max_temp],
        vmin=min_temp,
        vmax=max_temp,
        caption='Temperature (°C)'
    )
    colormap.add_to(m)

    # Iterate through each row in the DataFrame
    for _, row in df.iterrows():
        lat = row['Latitude']
        lon = row['Longitude']
        station = row['Station']
        temp = row['Temperature']

        # Determine the color based on the temperature
        color = temperature_to_color(temp, min_temp, max_temp) if pd.notna(temp) else 'gray'

        # Create a marker with temperature data
        folium.Marker(
            location=[lat, lon],
            popup=f"<p style='font-size: 12px; background-color: white; padding: 5px; border-radius: 5px;'>{station}: {temp:.1f}°C</p>",
            icon=folium.DivIcon(
                html=f'<div style="font-size: 10pt; color: {color}; padding: 2px; border-radius: 5px;">'
                     f'<strong>{temp:.1f}°C</strong></div>'
            )
        ).add_to(m)

    # Render the map in Streamlit
    st_folium(m, width=500, height=600)

# Column 3: Data table
with col3:
    # Set the table height using CSS
    st.markdown(
        """
        <style>
        .dataframe-container {
            height: 600px;
            overflow-y: auto;
        }
        </style>
        """,
        unsafe_allow_html=True
    )

    # Display the DataFrame with the custom CSS class
    st.dataframe(df[['Station', 'Temperature', 'Latitude', 'Longitude']], height=600)

# Add a refresh button
if st.button("Refresh Data"):
    st.experimental_rerun()

# Automatically rerun every 5 minutes
if 'last_ran' not in st.session_state:
    st.session_state.last_ran = datetime.now(timezone.utc)

current_time = datetime.now(timezone.utc)
if (current_time - st.session_state.last_ran).total_seconds() > 300:
    st.session_state.last_ran = current_time
    st.experimental_rerun()
pages/3_Humidity.py
DELETED
@@ -1,200 +0,0 @@
import streamlit as st
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
import folium
from folium import LinearColormap
import requests
from datetime import datetime, timedelta
from streamlit_folium import st_folium
import pytz
from datetime import datetime

# Set page layout to wide
st.set_page_config(layout="wide", page_title="Real-Time Relative Humidity Data Dashboard")

# Function to load data
@st.cache_data(ttl=300)  # Cache data to avoid reloading every time
def load_data():
    with st.spinner("Loading data..."):
        response = requests.get("https://csdi.vercel.app/weather/rhum")
        data = response.json()
        features = data['features']
        df = pd.json_normalize(features)
        df.rename(columns={
            'properties.Relative Humidity(percent)': 'Relative Humidity (%)',
            'properties.Automatic Weather Station': 'Station Name',
            'geometry.coordinates': 'Coordinates'
        }, inplace=True)
        df.dropna(subset=['Relative Humidity (%)'], inplace=True)
        hk_tz = pytz.timezone('Asia/Hong_Kong')
        fetch_time = datetime.now(hk_tz).strftime('%Y-%m-%dT%H:%M:%S')
        return df, fetch_time

# Check if the data has been loaded before
if 'last_run' not in st.session_state or (datetime.now() - st.session_state.last_run) > timedelta(minutes=5):
    st.session_state.df, st.session_state.fetch_time = load_data()
    st.session_state.last_run = datetime.now()

# Data
df = st.session_state.df
fetch_time = st.session_state.fetch_time

# Compute statistics
humidity_data = df['Relative Humidity (%)']
avg_humidity = humidity_data.mean()
max_humidity = humidity_data.max()
min_humidity = humidity_data.min()
std_humidity = humidity_data.std()

# Create three columns
col1, col2, col3 = st.columns([1.65, 2, 1.15])

# Column 1: Histogram and statistics
with col1:
    # Define colors for gradient
    color_scale = ['#58a0db', '#0033cc']

    # Create histogram
    fig = px.histogram(df, x='Relative Humidity (%)', nbins=20,
                       labels={'Relative Humidity (%)': 'Relative Humidity (%)'},
                       title='Relative Humidity Histogram',
                       color_discrete_sequence=color_scale)

    # Add average line
    fig.add_shape(
        go.layout.Shape(
            type="line",
            x0=avg_humidity,
            y0=0,
            x1=avg_humidity,
            y1=df['Relative Humidity (%)'].value_counts().max(),
            line=dict(color="red", width=2, dash="dash"),
        )
    )

    # Update layout
    fig.update_layout(
        xaxis_title='Relative Humidity (%)',
        yaxis_title='Count',
        title='Relative Humidity Distribution',
        bargap=0.2,
        title_font_size=20,
        xaxis_title_font_size=14,
        yaxis_title_font_size=14,
        height=350,
        shapes=[{
            'type': 'rect',
            'x0': min_humidity,
            'x1': max_humidity,
            'y0': 0,
            'y1': df['Relative Humidity (%)'].value_counts().max(),
            'fillcolor': 'rgba(0, 100, 255, 0.2)',
            'line': {
                'color': 'rgba(0, 100, 255, 0.2)',
                'width': 0
            },
            'opacity': 0.1
        }]
    )

    # Add annotations
    fig.add_annotation(
        x=avg_humidity,
        y=df['Relative Humidity (%)'].value_counts().max() * 0.9,
        text=f"Average: {avg_humidity:.2f}%",
        showarrow=True,
        arrowhead=1
    )

    st.plotly_chart(fig, use_container_width=True)
    st.caption(f"Data fetched at: {fetch_time}")

    # Display statistics
    col_1, col_2 = st.columns([1, 1])
    with col_1:
        st.metric(label="Average R.Humidity (%)", value=f"{avg_humidity:.2f}")
        st.metric(label="Minimum R.Humidity (%)", value=f"{min_humidity:.2f}")
    with col_2:
        st.metric(label="Maximum R.Humidity (%)", value=f"{max_humidity:.2f}")
        st.metric(label="Std. Dev (%)", value=f"{std_humidity:.2f}")


# Function to convert humidity to color based on gradient
def humidity_to_color(humidity, min_humidity, max_humidity):
    if pd.isna(humidity):
        return 'rgba(0, 0, 0, 0)'  # Return a transparent color if the humidity is NaN

    norm_humidity = (humidity - min_humidity) / (max_humidity - min_humidity)

    # Colors from light blue (#add8e6) to dark blue (#00008b)
    if norm_humidity < 0.5:
        r = int(173 + (0 - 173) * (2 * norm_humidity))
        g = int(216 + (0 - 216) * (2 * norm_humidity))
        b = int(230 + (139 - 230) * (2 * norm_humidity))
    else:
        r = int(0 + (0 - 0) * (2 * (norm_humidity - 0.5)))
        g = int(0 + (0 - 0) * (2 * (norm_humidity - 0.5)))
        b = int(139 + (139 - 139) * (2 * (norm_humidity - 0.5)))

    return f'rgb({r}, {g}, {b})'

# Column 2: Map
with col2:
    with st.spinner("Loading map..."):
        m = folium.Map(location=[22.3547, 114.1483], zoom_start=11, tiles='CartoDB positron')
        min_humidity = df['Relative Humidity (%)'].min()
        max_humidity = df['Relative Humidity (%)'].max()

        colormap = LinearColormap(
            colors=['#58a0db', 'blue'],
            index=[min_humidity, max_humidity],
            vmin=min_humidity,
            vmax=max_humidity,
            caption='Relative Humidity (%)'
        )
        colormap.add_to(m)

        for _, row in df.iterrows():
            humidity = row['Relative Humidity (%)']
            color = humidity_to_color(humidity, min_humidity, max_humidity)

            folium.Marker(
                location=[row['Coordinates'][1], row['Coordinates'][0]],
                popup=f"<p style='font-size: 12px; background-color: white; padding: 5px; border-radius: 5px;'>{row['Station Name']}: {humidity:.1f}%</p>",
                icon=folium.DivIcon(
                    html=f'<div style="font-size: 10pt; color: {color}; padding: 2px; border-radius: 5px;">'
                         f'<strong>{humidity:.1f}%</strong></div>'
                )
            ).add_to(m)

        st_folium(m, width=500, height=600)

# Column 3: Data Table
with col3:
    st.markdown(
        """
        <style>
        .dataframe-container {
            height: 600px;
            overflow-y: auto;
        }
        .dataframe th, .dataframe td {
            text-align: left;
            padding: 8px;
        }
        </style>
        """,
        unsafe_allow_html=True
    )

    # Rename column for display
    df_display = df[['Station Name', 'Relative Humidity (%)']].rename(columns={'Relative Humidity (%)': 'R.Humidity'})
    st.dataframe(df_display, height=600)

# Refresh Button
if st.button("Refresh Data"):
    with st.spinner("Refreshing data..."):
        st.session_state.df, st.session_state.fetch_time = load_data()
        st.session_state.last_run = datetime.now()
        st.experimental_rerun()
pages/4_Smart Lampposts.py
DELETED
@@ -1,177 +0,0 @@
import streamlit as st
import pandas as pd
import requests
import plotly.express as px
import plotly.graph_objs as go
from folium import DivIcon
import folium
from streamlit_folium import st_folium
from sklearn.linear_model import LinearRegression
from sklearn.cluster import DBSCAN
import matplotlib.cm as cm
import matplotlib.colors as mcolors
import time
import json
import pytz
from datetime import datetime

# Set page layout to wide
st.set_page_config(layout="wide", page_title="Real-Time Smart Lamppost Data Dashboard")

# Function to fetch JSON data with caching and expiration
@st.cache_data(ttl=600)
def fetch_data(url):
    response = requests.get(url)
    hk_tz = pytz.timezone('Asia/Hong_Kong')
    fetch_time = datetime.now(hk_tz).strftime('%Y-%m-%dT%H:%M:%S')
    return json.loads(response.text), fetch_time

# Function to calculate "feels like" temperature
def feels_like_temperature(temp_celsius, humidity_percent):
    return temp_celsius - (0.55 - 0.0055 * humidity_percent) * (temp_celsius - 14.5)

# Function to process the raw data into a DataFrame
def process_data(data):
    features = data['features']
    records = [
        {
            'latitude': feature['geometry']['coordinates'][1],
            'longitude': feature['geometry']['coordinates'][0],
            'temperature': feature['properties'].get('Air temperature (°C) / 氣溫 (°C) / 气温 (°C)'),
            'humidity': feature['properties'].get('Relative humidity (%) / 相對濕度 (%) / 相对湿度 (%)')
        }
        for feature in features
    ]
    df = pd.DataFrame(records)

    # Convert temperature and humidity to numeric, forcing errors to NaN
    df['temperature'] = pd.to_numeric(df['temperature'], errors='coerce')
    df['humidity'] = pd.to_numeric(df['humidity'], errors='coerce')

    # Drop rows with NaN values
    df = df.dropna(subset=['temperature', 'humidity'])

    # Calculate "feels like" temperature
    df['feels_like'] = df.apply(lambda row: feels_like_temperature(row['temperature'], row['humidity']), axis=1)

    return df

# Fetch and process data
url = "https://csdi.vercel.app/weather/smls"
data, fetch_time = fetch_data(url)
df = process_data(data)

# Perform clustering using DBSCAN
coords = df[['latitude', 'longitude']].values
db = DBSCAN(eps=0.01, min_samples=5).fit(coords)
df['cluster'] = db.labels_

# Initialize the 'predicted_humidity' column with NaN
df['predicted_humidity'] = pd.NA

# Perform linear regression for each cluster
for cluster in df['cluster'].unique():
    cluster_data = df[df['cluster'] == cluster]
    if len(cluster_data) > 1:  # Only perform regression if there are enough points
        X = cluster_data['temperature'].values.reshape(-1, 1)
        y = cluster_data['humidity'].values
        reg = LinearRegression().fit(X, y)
        df.loc[df['cluster'] == cluster, 'predicted_humidity'] = reg.predict(X)

# Calculate temperature statistics
temp_stats = df['temperature'].describe()
avg_temp = temp_stats['mean']
min_temp = temp_stats['min']
max_temp = temp_stats['max']
std_temp = temp_stats['std']

# Create regression plot using Plotly
fig = px.scatter(df, x='temperature', y='humidity', color='cluster',
                 title='Temperature vs. Relative Humidity with Regression by Cluster')

# Add regression lines to the plot
for cluster in df['cluster'].unique():
    cluster_data = df[df['cluster'] == cluster]
    if 'predicted_humidity' in cluster_data.columns and not cluster_data['predicted_humidity'].isna().all():
        fig.add_trace(go.Scatter(x=cluster_data['temperature'], y=cluster_data['predicted_humidity'], mode='lines',
                                 name=f'Cluster {cluster}'))

# Column 1: Regression Plot, Data, and Statistics
col1, col2, col3 = st.columns([1.65, 2, 1.15])

with col1:
    st.plotly_chart(fig, use_container_width=True, height=300)
    st.caption(f"Data fetched at: {fetch_time}")

    # Display temperature statistics
    col_1, col_2 = st.columns([1, 1])
    with col_1:
        st.metric(label="Average Temperature (°C)", value=f"{avg_temp:.2f}")
        st.metric(label="Minimum Temperature (°C)", value=f"{min_temp:.2f}")
    with col_2:
        st.metric(label="Maximum Temperature (°C)", value=f"{max_temp:.2f}")
        st.metric(label="Std. Dev (°C)", value=f"{std_temp:.2f}")

# Column 2: Map
with col2:
    # Initialize the Folium map
    m = folium.Map(location=[22.320394086610452, 114.21626912476121], zoom_start=14, tiles='CartoDB positron')

    # Define a color map for clusters
    unique_clusters = df['cluster'].unique()
    colors = cm.get_cmap('tab10', len(unique_clusters))  # Using 'tab10' colormap for up to 10 clusters
    cluster_colors = {cluster: mcolors.to_hex(colors(i)) for i, cluster in enumerate(unique_clusters)}

    # Plot original data points
    for _, row in df.iterrows():
        folium.CircleMarker(
            location=[row['latitude'], row['longitude']],
            radius=5,
            color=cluster_colors[row['cluster']],
            fill=True,
            fill_color=cluster_colors[row['cluster']],
            fill_opacity=0.7,
            popup=f"Temp: {row['temperature']} °C<br>Humidity: {row['humidity']} %<br>Feels Like: {row['feels_like']:.2f} °C<br>Cluster: {row['cluster']}"
        ).add_to(m)

    # Calculate the average temperature for each cluster
    cluster_centers = df.groupby('cluster').agg({
        'latitude': 'mean',
        'longitude': 'mean',
        'temperature': 'mean'
    }).reset_index()

    # Plot cluster centers
    for _, row in cluster_centers.iterrows():
        folium.Marker(
            location=[row['latitude'], row['longitude']],
            icon=DivIcon(
                icon_size=(150, 36),
                icon_anchor=(85, 20),  # Adjusted anchor position to move text away from the point
                html=f'<strong><div style="font-size: 15px; color: {cluster_colors[row["cluster"]]}">{row["temperature"]:.2f} °C</div></strong>'
            ),
            popup=f"Cluster: {row['cluster']}<br>Avg Temp: {row['temperature']:.2f} °C"
        ).add_to(m)

    # Display the map in Streamlit
    st_folium(m, width=500, height=600)

# Column 3: Data Table
with col3:
    st.markdown(
        """
        <style>
        .dataframe-container {
            height: 600px;
            overflow-y: auto;
        }
        .dataframe th, .dataframe td {
            text-align: left;
            padding: 8px;
        }
        </style>
        """,
        unsafe_allow_html=True
    )
    # Display the DataFrame
    st.dataframe(df[['latitude', 'longitude', 'temperature', 'humidity', 'feels_like', 'cluster']], height=600)
pages/5_Past Records.py
DELETED
@@ -1,141 +0,0 @@
import streamlit as st
import pandas as pd
import plotly.express as px
import json
import os
import glob

# Directory paths
data_dir = 'past_temp'
geojson_file = os.path.join(data_dir, 'FavgTS.geojson')

# Load GeoJSON data
def load_geojson():
    with open(geojson_file) as f:
        return json.load(f)

# Create a dictionary to map short forms to long forms
def create_station_map(geojson):
    feature_map = {}
    for feature in geojson['features']:
        short_name = feature['properties']['WeatherStationShortName']
        long_name = feature['properties']['WeatherStationName_en']
        feature_map[short_name] = long_name
    return feature_map

# Load CSV files
def load_csv_files():
    return glob.glob(os.path.join(data_dir, '*.csv'))

# Plot time series
def plot_time_series(df, station_name):
    df['Date'] = pd.to_datetime(df['Date'], format='%Y%m%d', errors='coerce')
    df['Year'] = df['Date'].dt.year
    df['Month'] = df['Date'].dt.month

    fig_all_years = px.line(df, x='Date', y='Value', color='Year',
                            title=f'All-Year Temperature Time Series for {station_name}',
                            labels={'Date': 'Date', 'Value': 'Temperature (°C)', 'Year': 'Year'},
                            line_shape='linear')
    fig_all_years.update_layout(xaxis_title='Date', yaxis_title='Temperature (°C)')

    return fig_all_years

# Plot monthly averages
def plot_monthly_averages(df, station_name):
    df['Date'] = pd.to_datetime(df['Date'], format='%Y%m%d', errors='coerce')
    df['Year'] = df['Date'].dt.year
    df['Month'] = df['Date'].dt.month

    monthly_avg = df.groupby(['Year', 'Month'])['Value'].mean().reset_index()

    fig_monthly_avg = px.line(monthly_avg, x='Month', y='Value', color='Year',
                              title=f'Monthly Average Temperature Time Series for {station_name}',
                              labels={'Month': 'Month', 'Value': 'Average Temperature (°C)', 'Year': 'Year'},
                              line_shape='linear')
    fig_monthly_avg.update_layout(xaxis_title='Month', yaxis_title='Average Temperature (°C)', xaxis_tickformat='%b')

    return fig_monthly_avg

def plot_annual_average(df, station_name):
    annual_avg = df.groupby('Year')['Value'].mean().reset_index()

    fig_annual_avg = px.line(annual_avg, x='Year', y='Value',
                             title=f'Annual Average Temperature Trend for {station_name}',
                             labels={'Year': 'Year', 'Value': 'Average Temperature (°C)'},
                             line_shape='linear')
    fig_annual_avg.update_layout(xaxis_title='Year', yaxis_title='Average Temperature (°C)')

    return fig_annual_avg

# Streamlit app layout
st.set_page_config(layout="wide", page_title="Temperature Time Series")

# Load GeoJSON and create mapping
geojson = load_geojson()
station_map = create_station_map(geojson)

# Load all CSV files
csv_files = load_csv_files()

# Initialize data storage for all CSV files
all_data = []

# Process each CSV file
for file in csv_files:
    try:
        file_name = os.path.basename(file)
        short_form = file_name.split('.')[0]  # Get the file name without extension

        df = pd.read_csv(file)

        if df.shape[1] < 2:
            st.error(f"File {file} does not have the expected number of columns. Skipping.")
            continue

        if df.columns[0] != 'Date':
            df.columns = ['Date', 'Value']

        long_form = station_map.get(short_form, "Unknown Station")
        df['Station'] = long_form
        all_data.append(df)

    except Exception as e:
        st.error(f"Error loading or processing file {file}: {e}")

# Combine all data into a single DataFrame
if all_data:
    combined_df = pd.concat(all_data, ignore_index=True)
    combined_df['Date'] = pd.to_datetime(combined_df['Date'], format='%Y%m%d', errors='coerce')
    combined_df = combined_df.dropna(subset=['Date'])
    combined_df['Year'] = combined_df['Date'].dt.year
    combined_df['Month'] = combined_df['Date'].dt.month

    stations = combined_df['Station'].unique()
    default_station = stations[0] if len(stations) > 0 else None

    if not stations.size:
        st.write("No stations available in the data.")
    else:
        st.subheader('Past Daily Average Temperature Time Series')
        selected_station = st.selectbox("Select a Station", options=stations, index=0)

        station_data = combined_df[combined_df['Station'] == selected_station]

        if not station_data.empty:
            # Create two columns for plots
            col1, col2 = st.columns([2, 1.5])

            # Top plot: All-year time series
            with col1:
                fig_all_years = plot_time_series(station_data, selected_station)
                st.plotly_chart(fig_all_years, use_container_width=True)

            # Bottom plot: Monthly average temperatures
            with col2:
                fig_monthly_avg = plot_monthly_averages(station_data, selected_station)
                st.plotly_chart(fig_monthly_avg, use_container_width=True)
        else:
            st.write(f"No data available for the selected station '{selected_station}'.")
else:
    st.write("No data to display.")
pages/6_Heat island.py
DELETED
@@ -1,183 +0,0 @@
import streamlit as st
import folium
import json
import plotly.express as px
import pandas as pd
from streamlit_folium import st_folium
import plotly.graph_objs as go

st.set_page_config(layout="wide", page_title="Heat Island Effect Analysis")

def load_geojson(filepath):
    with open(filepath, 'r', encoding='utf-8') as f:
        return json.load(f)

def plot_geojson(feature_group, geojson_data, property_name, colormap):
    folium.GeoJson(
        geojson_data,
        style_function=lambda feature: {
            'fillColor': colormap(feature['properties'][property_name]),
            'color': 'black',
            'weight': 1,
            'fillOpacity': 0.7,
        },
        popup=folium.GeoJsonPopup(fields=['NAME_EN', property_name], aliases=['District:', 'Value:']),
    ).add_to(feature_group)

def compute_difference_geojson(geojson_2013, geojson_2023):
    difference_geojson = {"type": "FeatureCollection", "features": []}

    name_to_hot_nights_2013 = {
        feature['properties']['NAME_EN']: feature['properties']['Hot_Nights']
        for feature in geojson_2013['features']
    }

    for feature in geojson_2023['features']:
        name_en = feature['properties']['NAME_EN']
        hot_nights_2013 = name_to_hot_nights_2013.get(name_en, 0)
        hot_nights_2023 = feature['properties']['Hot_Nights']
        difference = hot_nights_2023 - hot_nights_2013

        feature['properties']['Difference'] = difference
        difference_geojson['features'].append(feature)

    return difference_geojson

def geojson_to_dataframe(geojson_data, year):
    features = geojson_data['features']
    data = {
        'District': [feature['properties']['NAME_EN'] for feature in features],
        'Hot_Nights': [feature['properties']['Hot_Nights'] for feature in features],
        'Year': [year] * len(features)  # Add year column
    }
    return pd.DataFrame(data)

geojson_2013 = load_geojson('ref/2013_hot.geojson')
geojson_2023 = load_geojson('ref/2023_hot.geojson')

hot_nights_2013 = [feature['properties']['Hot_Nights'] for feature in geojson_2013['features']]
hot_nights_2023 = [feature['properties']['Hot_Nights'] for feature in geojson_2023['features']]
all_hot_nights = hot_nights_2013 + hot_nights_2023

colormap = folium.LinearColormap(
    colors=['white', 'orange', 'red'],
    vmin=min(all_hot_nights),
    vmax=max(all_hot_nights),
    caption='Hot Nights'
)

difference_geojson = compute_difference_geojson(geojson_2013, geojson_2023)

diff_colormap = folium.LinearColormap(
    colors=['blue', 'lightblue', 'white', 'pink', 'red'],
    index=[-50, -10, 0, 10, 50],
    vmin=-50,
    vmax=50,
    caption='Change in Hot Nights'
)

m = folium.Map(location=[22.35994791346238, 114.15924623933743], zoom_start=11, tiles='CartoDB positron')

feature_group_2013 = folium.FeatureGroup(name='2013 Hot Nights', show=False)
feature_group_2023 = folium.FeatureGroup(name='2023 Hot Nights', show=False)
feature_group_diff = folium.FeatureGroup(name='Change in Hot Nights', show=True)

plot_geojson(feature_group_2013, geojson_2013, 'Hot_Nights', colormap)
plot_geojson(feature_group_2023, geojson_2023, 'Hot_Nights', colormap)
plot_geojson(feature_group_diff, difference_geojson, 'Difference', diff_colormap)

feature_group_2013.add_to(m)
feature_group_2023.add_to(m)
feature_group_diff.add_to(m)

layer_control = folium.LayerControl().add_to(m)

colormap.add_to(m)
diff_colormap.add_to(m)

df_2013 = geojson_to_dataframe(geojson_2013, '2013')
df_2023 = geojson_to_dataframe(geojson_2023, '2023')

combined_df = pd.concat([df_2013, df_2023])

def plot_combined_box_plot(df):
    fig = px.box(
        df,
        x='Year',
        y='Hot_Nights',
        title='Hot Nights (2013 vs 2023)',
        labels={'Hot_Nights': 'Number of Hot Nights', 'Year': 'Year'},
        color='Year'
    )
    fig.update_layout(
        yaxis_title='Number of Hot Nights',
        boxmode='group'
    )
    return fig

data_table = pd.read_csv('ref/final_summary_with_available_stations.csv')

stations = data_table['station_name'].unique()

col1, col2, col3 = st.columns([1.35, 2, 1.1])

with col1:
    st.subheader('Heat Island Effect')
    st.caption(
        'The "heat island effect" refers to the temperature difference between urban and rural areas, particularly at night.')
    st.caption(
        'This phenomenon is a result of the urbanization and development processes. During the day, the urban environment (such as cement pavement) absorbs and stores more heat from solar insolation compared to rural areas (vegetation). This heat is then slowly released in the evening and nighttime, leading to higher temperatures in the urban areas.')

    selected_station = st.selectbox('Select a Station:', options=stations)

    filtered_data_table = data_table[data_table['station_name'] == selected_station]

    fig = go.Figure()

    fig.add_trace(go.Scatter(
        x=filtered_data_table['month'],
        y=filtered_data_table['13day_temp'],
        mode='lines+markers',
        name='2013 Day Temp',
        line=dict(color='blue')
    ))
    fig.add_trace(go.Scatter(
        x=filtered_data_table['month'],
        y=filtered_data_table['13night_temp'],
        mode='lines+markers',
        name='2013 Night Temp',
        line=dict(color='blue', dash='dash')
    ))
    fig.add_trace(go.Scatter(
        x=filtered_data_table['month'],
        y=filtered_data_table['23day_temp'],
        mode='lines+markers',
        name='2023 Day Temp',
        line=dict(color='red')
    ))
    fig.add_trace(go.Scatter(
        x=filtered_data_table['month'],
        y=filtered_data_table['23night_temp'],
        mode='lines+markers',
        name='2023 Night Temp',
        line=dict(color='red', dash='dash')
    ))

    fig.update_layout(
        title=f'Temperature Comparison',
        xaxis_title='Month',
        yaxis_title='Temperature (°C)',
        legend_title='Legend',
        height=300
    )

    st.plotly_chart(fig, height=180)

with col2:
    st_folium(m, width=550, height=650)

with col3:
    st.caption(
        'From data from the CO-WIN network, there has been a significant increase in the number of hot nights in Hong Kong. "Hot nights" refers to nights where the temperature remains above 28 degrees. Within the period from 2013 to 2023, 9 districts in Hong Kong have experienced an increase in the frequency of hot nights, the most significant are those in the urban.')

    st.plotly_chart(plot_combined_box_plot(combined_df), use_container_width=True, height=380)
pages/7_CoWIN.py
DELETED
@@ -1,172 +0,0 @@
import streamlit as st
import requests
import json
import pandas as pd
import folium
from streamlit_folium import st_folium
import plotly.graph_objects as go
import numpy as np
from datetime import datetime
from branca.colormap import LinearColormap
import pytz

st.set_page_config(layout="wide", page_title="Real-Time CoWIN Weather Data Dashboard")

@st.cache_data(ttl=300)  # Cache data for 5 minutes (300 seconds)
def fetch_data():
    hk_tz = pytz.timezone('Asia/Hong_Kong')
    current_time = datetime.now(hk_tz).strftime('%Y-%m-%dT%H:%M:%S')
    url = f'https://cowin.hku.hk/API/data/CoWIN/map?time={current_time}'
    response = requests.get(url)
    return json.loads(response.text), current_time

data, fetched_time = fetch_data()

features = data
df = pd.json_normalize(features)

df.rename(columns={
    'station': 'Station',
    'temp': 'Temperature',
    'lat': 'Latitude',
    'lon': 'Longitude',
    'wd': 'Wind Direction',
    'ws': 'Wind Speed',
    'rh': 'Relative Humidity',
    'uv': 'UV Radiation',
    'me_name': 'Name'
}, inplace=True)

attribute = st.selectbox(
    'Select Weather Attributes to Plot and Map (Data from HKO-HKU CoWIN)',
    ['Temperature', 'Wind Speed', 'Relative Humidity', 'UV Radiation']
)

col1, col2, col3 = st.columns([1.65, 2, 1.2])

with col1:
    attr_series = pd.Series(df[attribute].dropna())

    hist_data = np.histogram(attr_series, bins=10)
    bin_edges = hist_data[1]
    counts = hist_data[0]

    def get_color(value, min_value, max_value):
        ratio = (value - min_value) / (max_value - min_value)
        r = int(255 * ratio)
        b = int(255 * (1 - ratio))
        return f'rgb({r}, 0, {b})'

    fig = go.Figure()

    for i in range(len(bin_edges) - 1):
        bin_center = (bin_edges[i] + bin_edges[i + 1]) / 2
        color = get_color(bin_center, bin_edges.min(), bin_edges.max())
        fig.add_trace(go.Bar(
            x=[f'{bin_edges[i]:.1f} - {bin_edges[i + 1]:.1f}'],
            y=[counts[i]],
            marker_color=color,
            name=f'{bin_edges[i]:.1f} - {bin_edges[i + 1]:.1f}'
        ))

    fig.update_layout(
        xaxis_title=f'{attribute}',
        yaxis_title='Count',
        title=f'{attribute} Distribution',
        bargap=0.2,
        title_font_size=20,
        xaxis_title_font_size=14,
        yaxis_title_font_size=14,
        height=350,
        xaxis=dict(title_font_size=14),
        yaxis=dict(title_font_size=14)
    )

    st.plotly_chart(fig, use_container_width=True)
    st.caption(f"Data fetched at: {fetched_time}")

    with st.container():
        col_1, col_2 = st.columns([1, 1])
        with col_1:
            if attr_series.size > 0:
                avg_attr = np.mean(attr_series)
                std_attr = np.std(attr_series)
                max_attr = np.max(attr_series)
                min_attr = np.min(attr_series)

                st.metric(label=f"Average {attribute}", value=f"{avg_attr:.2f}")
                st.metric(label=f"Minimum {attribute}", value=f"{min_attr:.2f}")
        with col_2:
            st.metric(label=f"Maximum {attribute}", value=f"{max_attr:.2f}")
            st.metric(label=f"Std. Dev {attribute}", value=f"{std_attr:.2f}")

def attribute_to_color(value, min_value, max_value):
    """Convert a value to a color based on the gradient."""
    ratio = (value - min_value) / (max_value - min_value)
    return LinearColormap(['blue', 'purple', 'red']).rgb_hex_str(ratio)

with col2:
    m = folium.Map(location=[22.3547, 114.1483], zoom_start=11, tiles='CartoDB positron')

    min_value = df[attribute].min()
    max_value = df[attribute].max()

    for _, row in df.iterrows():
        lat = row['Latitude']
        lon = row['Longitude']
        station = row['Station']
        name = row['Name']
        value = row[attribute]

        color = attribute_to_color(value, min_value, max_value) if pd.notna(value) else 'gray'

        folium.Marker(
            location=[lat, lon],
            popup=(
                f"<p style='font-size: 12px; background-color: white; padding: 5px; border-radius: 5px;'>"
                f"Station: {station}<br>"
                f"Name: {name}<br>"
                f"{attribute}: {value}<br>"
                f"</p>"
            ),
            icon=folium.DivIcon(
                html=f'<div style="font-size: 10pt; color: {color}; padding: 2px; border-radius: 5px;">'
                     f'<strong>{value}</strong></div>'
            )
        ).add_to(m)

    # Create a color scale legend
    colormap = folium.LinearColormap(
        colors=['blue', 'purple', 'red'],
        index=[min_value, (min_value + max_value) / 2, max_value],
        vmin=min_value,
        vmax=max_value,
        caption=f'{attribute}'
    )
    colormap.add_to(m)

    st_folium(m, width=530, height=600)

with col3:
    st.markdown(
        """
        <style>
        .dataframe-container {
            height: 600px;
            overflow-y: auto;
        }
        </style>
        """,
        unsafe_allow_html=True
    )

    st.dataframe(df[['Station', 'Name', 'Temperature', 'Wind Speed', 'Relative Humidity', 'UV Radiation', 'Latitude', 'Longitude']], height=600)

if st.button("Refresh Data"):
    st.experimental_rerun()

hk_tz = pytz.timezone('Asia/Hong_Kong')
current_time = datetime.now(hk_tz)
if 'last_ran' not in st.session_state or (current_time - st.session_state.last_ran.replace(tzinfo=hk_tz)).total_seconds() > 300:
    st.session_state.last_ran = current_time
    st.experimental_rerun()