deagar committed

Commit 60823be · 1 Parent(s): 86822b0

initial commit
Dockerfile ADDED
@@ -0,0 +1,24 @@
+ # Use a lightweight Python base image
+ FROM python:3.10-slim
+
+ # Set the working directory inside the container
+ WORKDIR /app
+
+ # Install system-level dependencies (optional, if needed)
+ # RUN apt-get update && apt-get install -y <dependencies>
+
+ # Copy the requirements file into the container
+ COPY requirements.txt .
+
+ # Install Python dependencies
+ RUN pip install --no-cache-dir -r requirements.txt
+
+ # Copy the rest of the application files into the container
+ COPY . .
+
+ # Expose the port the app runs on
+ EXPOSE 8050
+
+ # Command to run the Dash app using gunicorn
+ CMD ["gunicorn", "--bind", "0.0.0.0:8050", "app:server"]
+
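+ # Example local usage (the image tag below is illustrative; it is not defined elsewhere in this commit):
+ #   docker build -t ne-load-dash .
+ #   docker run -p 8050:8050 ne-load-dash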
app.py ADDED
@@ -0,0 +1,300 @@
+ import json
+ import dash
+ from dash import dcc, html, Input, Output
+ import plotly.express as px
+ import plotly.graph_objects as go
+ import pandas as pd
+
+ # -----------------------------------------
+ # Load and Prepare Data
+ # -----------------------------------------
+ df = pd.read_csv("./data/megawatt_demand_2024.csv")
+ df['timestamp'] = pd.to_datetime(df['UTC Timestamp (Interval Ending)'])
+
+ load_columns = [
+     "Connecticut Actual Load (MW)",
+     "Maine Actual Load (MW)",
+     "New Hampshire Actual Load (MW)",
+     "Northeast Massachusetts Actual Load (MW)",
+     "Rhode Island Actual Load (MW)",
+     "Southeast Massachusetts Actual Load (MW)",
+     "Vermont Actual Load (MW)",
+     "Western/Central Massachusetts Actual Load (MW)"
+ ]
+
+ df_melted = df.melt(
+     id_vars=['timestamp'],
+     value_vars=load_columns,
+     var_name='region',
+     value_name='load_mw'
+ )
+
+ # Clean region names (strip the " Actual Load (MW)" suffix)
+ df_melted['region'] = df_melted['region'].str.replace(r' Actual Load \(MW\)', '', regex=True)
+
+ # Compute daily aggregates
+ df_melted['date'] = df_melted['timestamp'].dt.date
+ daily_agg = df_melted.groupby(['region', 'date']).agg(
+     daily_avg=('load_mw', 'mean'),
+     daily_min=('load_mw', 'min'),
+     daily_max=('load_mw', 'max')
+ ).reset_index()
+
+ print(df_melted.head())
+ print(daily_agg.head())
+ # Load GeoJSON
+ with open('./data/new_england_geojson.json') as f:
+     geojson = json.load(f)
+
+ # Define a color map for the regions
+ region_colors = {
+     "Connecticut": "#1f77b4",
+     "Maine": "#ff7f0e",
+     "New Hampshire": "#2ca02c",
+     "Northeast Massachusetts": "#d62728",
+     "Rhode Island": "#9467bd",
+     "Southeast Massachusetts": "#8c564b",
+     "Vermont": "#e377c2",
+     "Western/Central Massachusetts": "#7f7f7f"
+ }
+
+ # Get unique dates for slider (daily granularity)
+ unique_dates = sorted(daily_agg['date'].unique())
+
+ # Identify month start dates
+ month_starts = [(i, d) for i, d in enumerate(unique_dates) if d.day == 1]
+
+ # Create marks only for the first of each month
+ date_marks = {i: d.strftime("%Y-%m-%d") for i, d in month_starts}
+
+ # Initial state: use the full range of dates
+ start_idx = 0
+ end_idx = len(unique_dates) - 1
+ start_date = unique_dates[start_idx]
+ end_date = unique_dates[end_idx]
+
+ # Create initial figures
+ latest_time = df_melted['timestamp'].max()
+ df_latest = df_melted[df_melted['timestamp'] == latest_time]
+ # Average load per region (aggregate only the numeric load column)
+ df_avg = df_melted.groupby('region', as_index=False)['load_mw'].mean()
+ print(df_avg.head())
+
+ # Week start timestamps for the weekly-peak chart shown before a region is clicked
+ df_melted['week'] = df_melted['timestamp'].dt.to_period('W').apply(lambda r: r.start_time)
+
+ fig_map = px.choropleth_mapbox(
+     df_avg,
+     geojson=geojson,
+     locations='region',
+     featureidkey='properties.NAME',
+     color='load_mw',
+     color_continuous_scale="Viridis",
+     mapbox_style="carto-positron",
+     zoom=5,
+     center={"lat": 43.5, "lon": -71.5},  # Approximate center of New England
+     opacity=0.7,
+     hover_name='region'
+ )
+ fig_map.update_layout(margin={"r": 0, "t": 0, "l": 0, "b": 0}, template='plotly_dark')
+
+ # Initial line plot: all regions
+ fig_line_all = px.line(
+     df_melted,
+     x='timestamp',
+     y='load_mw',
+     color='region',
+     title='Load Over Time',
+     labels={'load_mw': 'Load (MW)', 'timestamp': 'Time'},
+     template='plotly_dark',
+     color_discrete_map=region_colors
+ )
+ fig_line_all.update_layout(hovermode="x unified")
+
+ # Initial daily aggregate figure (left empty until a region is clicked)
+ fig_daily = go.Figure(layout={"template": "plotly_dark"})
+ fig_daily.update_layout(title="Daily Aggregate Load", xaxis_title=None, yaxis_title="Load (MW)")
+
+ # -----------------------------------------
+ # Dash App
+ # -----------------------------------------
+ app = dash.Dash(__name__)
+
+ # Expose the Flask server instance (gunicorn targets this as app:server)
+ server = app.server
+
+ app.layout = html.Div(
+     style={"backgroundColor": "#333", "color": "#fff", "padding": "20px"},  # Dark background
+     children=[
+         html.H1("ISO-New England Grid Loading, 2024", style={"textAlign": "center"}),
+         html.Div([
+             html.Div([
+                 html.H4('Average Load by ISO-NE Region'),
+                 html.P("Click to filter by region"),
+                 dcc.Graph(id='map', figure=fig_map, style={"height": "60vh"}),
+
+                 # Markdown descriptor
+                 dcc.Markdown(
+                     """
+                     **ISO-New England Load by Region:**
+                     This dashboard provides an interactive visualization of electricity
+                     usage across New England states and Massachusetts sub-regions.
+                     Use the date range slider and map to filter and explore trends in grid demand over time.
+
+                     [Data from ISO-NE](https://www.eia.gov/electricity/wholesalemarkets/isone.php)
+                     """,
+                     style={"marginTop": "20px", "height": "30vh", "overflowY": "auto"}
+                 )
+             ], style={"width": "40%", "display": "inline-block", "verticalAlign": "top"}),
+
+             html.Div([
+                 dcc.Graph(id='timeseries', figure=fig_line_all, style={"height": "60vh"}),
+                 dcc.Graph(id='daily_timeseries', figure=fig_daily, style={"height": "60vh", "marginTop": "20px"})
+             ], style={"width": "58%", "display": "inline-block", "paddingLeft": "2%", "verticalAlign": "top"})
+         ]),
+
+         html.Div([
+             dcc.RangeSlider(
+                 id='date-range-slider',
+                 min=0,
+                 max=len(unique_dates) - 1,
+                 value=[275, 306],  # Month of October
+                 marks=date_marks,
+                 step=1,
+                 tooltip=None
+             ),
+         ], style={"marginBottom": "20px"})
+     ]
+ )
+
+ @app.callback(
+     [Output('map', 'figure'),
+      Output('timeseries', 'figure'),
+      Output('daily_timeseries', 'figure')],
+     [Input('map', 'clickData'),
+      Input('date-range-slider', 'value')]
+ )
+ def update_charts(clickData, slider_value):
+     start_idx, end_idx = slider_value
+     start_date = unique_dates[start_idx]
+     end_date = unique_dates[end_idx]
+
+     # Average load per region over the selected date range (numeric load column only)
+     df_map_day = df_melted[
+         (df_melted['date'] >= start_date) & (df_melted['date'] <= end_date)
+     ].groupby('region', as_index=False)['load_mw'].mean()
+     df_line = df_melted[(df_melted['date'] >= start_date) & (df_melted['date'] <= end_date)]
+     df_line_daily = daily_agg[(daily_agg['date'] >= start_date) & (daily_agg['date'] <= end_date)]
+
+     # Weekly max
+     weekly_max = df_melted[
+         (df_melted['date'] >= start_date) & (df_melted['date'] <= end_date)
+     ].groupby(['region', 'week']).agg(weekly_max=('load_mw', 'max')).reset_index()
+
+     if clickData is None:
+         # No region clicked: show all regions
+         fig_map = px.choropleth_mapbox(
+             df_map_day,
+             geojson=geojson,
+             locations='region',
+             featureidkey='properties.NAME',
+             color='load_mw',
+             color_continuous_scale="Viridis",
+             mapbox_style="carto-positron",
+             zoom=5,
+             center={"lat": 43.5, "lon": -71.5},
+             opacity=0.7,
+             hover_name='region'
+         )
+         fig_map.update_layout(margin={"r": 0, "t": 0, "l": 0, "b": 0}, template='plotly_dark')
+
+         fig_line = px.line(
+             df_line, x='timestamp', y='load_mw', color='region',
+             title='Load Over Time (Selected Date Range)',
+             labels={'load_mw': 'Load (MW)', 'timestamp': 'Time'},
+             template='plotly_dark',
+             color_discrete_map=region_colors
+         )
+         fig_line.update_layout(hovermode="x unified", xaxis_title=None)
+
+         fig_weekly_max = go.Figure(layout={"template": "plotly_dark"})
+         fig_weekly_max.update_layout(
+             title="Weekly Max Load by Region",
+             xaxis_title=None,
+             yaxis_title="Load (MW)",
+             hovermode="x unified"
+         )
+
+         for region in weekly_max['region'].unique():
+             dff = weekly_max[weekly_max['region'] == region]
+             region_color = region_colors.get(region, "white")
+             fig_weekly_max.add_trace(
+                 go.Scatter(
+                     x=dff['week'], y=dff['weekly_max'],
+                     mode='lines+markers',
+                     line=dict(color=region_color, width=2),
+                     marker=dict(color=region_color, size=6),
+                     name=f"{region} Weekly Max"
+                 )
+             )
+
+         return fig_map, fig_line, fig_weekly_max
+
+     # Region clicked
+     clicked_region = clickData['points'][0]['location']
+     dff = df_line[df_line['region'] == clicked_region]
+     dff_daily = df_line_daily[df_line_daily['region'] == clicked_region]
+
+     fig_map = px.choropleth_mapbox(
+         df_map_day,
+         geojson=geojson,
+         locations='region',
+         featureidkey='properties.NAME',
+         color='load_mw',
+         color_continuous_scale="Viridis",
+         mapbox_style="carto-positron",
+         zoom=5,
+         center={"lat": 43.5, "lon": -71.5},
+         opacity=0.7,
+         hover_name='region'
+     )
+     fig_map.update_layout(margin={"r": 0, "t": 0, "l": 0, "b": 0}, template='plotly_dark')
+
+     fig_line = px.line(
+         dff, x='timestamp', y='load_mw', color='region',
+         title=f'Load Over Time: {clicked_region} ({start_date} to {end_date})',
+         labels={'load_mw': 'Load (MW)', 'timestamp': 'Time'},
+         template='plotly_dark',
+         color_discrete_map=region_colors
+     )
+     fig_line.update_layout(hovermode="x unified")
+
+     fig_daily = go.Figure(layout={"template": "plotly_dark"})
+     region_color = region_colors.get(clicked_region, "white")
+
+     if not dff_daily.empty:
+         # Daily max first, then daily min with fill='tonexty' to shade the min-max band
+         fig_daily.add_trace(go.Scatter(
+             x=dff_daily['date'], y=dff_daily['daily_max'],
+             mode='lines', line_color=region_color,
+             name='Daily Max'
+         ))
+         fig_daily.add_trace(go.Scatter(
+             x=dff_daily['date'], y=dff_daily['daily_min'],
+             fill='tonexty', mode='lines', line_color=region_color,
+             name='Daily Min'
+         ))
+         fig_daily.add_trace(go.Scatter(
+             x=dff_daily['date'], y=dff_daily['daily_avg'],
+             mode='lines+markers', line_color='white', name='Daily Avg'
+         ))
+
+     fig_daily.update_layout(
+         title=f"Daily Load Summary: {clicked_region}",
+         xaxis_title="Date",
+         yaxis_title="Load (MW)",
+         hovermode="x unified"
+     )
+
+     return fig_map, fig_line, fig_daily
+
+ if __name__ == '__main__':
+     app.run_server(host='0.0.0.0', port=8050, debug=False)
data/gz_2010_us_040_00_500k.json ADDED
The diff for this file is too large to render.

data/iso_ne_geojson.json ADDED
The diff for this file is too large to render.

data/megawatt_demand_2024.csv ADDED
The diff for this file is too large to render.

data/new_england_geojson.json ADDED
The diff for this file is too large to render.
data/references.md ADDED
@@ -0,0 +1 @@
+ Thank you to the [US Energy Information Administration](https://www.eia.gov/electricity/wholesalemarkets/data.php?rto=isone) for the data.
geo_json_extract.py ADDED
@@ -0,0 +1,33 @@
+ import json
+ import os
+
+ print(f'Current wd: {os.getcwd()}')
+
+ # Define the states you want to keep
+ new_england_states = {"Maine", "New Hampshire", "Vermont", "Massachusetts", "Rhode Island", "Connecticut"}
+
+ # Input and output file paths
+ input_file = "./figure_friday/2024/week_49/data/gz_2010_us_040_00_500k.json"
+ output_file = "./figure_friday/2024/week_49/data/new_england_geojson.json"
+
+ # Load the input GeoJSON
+ with open(input_file, "r", encoding="utf-8") as f:
+     data = json.load(f)
+
+ # Filter the features
+ filtered_features = []
+ for feature in data["features"]:
+     if feature["properties"].get("NAME") in new_england_states:
+         filtered_features.append(feature)
+
+ # Create a new GeoJSON FeatureCollection
+ filtered_data = {
+     "type": "FeatureCollection",
+     "features": filtered_features
+ }
+
+ # Write the filtered data to a new file
+ with open(output_file, "w", encoding="utf-8") as f:
+     json.dump(filtered_data, f, indent=2)
+
+ print(f"Filtered GeoJSON saved to {output_file}")
notebooks/newengland_power_demand.ipynb ADDED
@@ -0,0 +1,36 @@
+ {
+  "cells": [
+   {
+    "cell_type": "markdown",
+    "metadata": {},
+    "source": [
+     "### #Figurefriday Week 49\n",
+     "\n",
+     "Getting amped up to take a look at power consumption rates across New England in 2024"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": null,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "import pandas as pd\n",
+     "import plotly.express as px"
+    ]
+   }
+  ],
+  "metadata": {
+   "kernelspec": {
+    "display_name": "figure_friday",
+    "language": "python",
+    "name": "python3"
+   },
+   "language_info": {
+    "name": "python",
+    "version": "3.9.15"
+   }
+  },
+  "nbformat": 4,
+  "nbformat_minor": 2
+ }
requirements.txt ADDED
@@ -0,0 +1,11 @@
+ dash==2.18.2
+ dash-core-components==2.0.0
+ dash-html-components==2.0.0
+ Flask==3.0.3
+ plotly==5.24.1
+ pandas==1.5
+ numpy==1.26.4
+ gunicorn==20.1.0
+
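+ # These pins are what the Dockerfile installs via "pip install --no-cache-dir -r requirements.txt";
+ # for a local, non-Docker setup the same command should work on Python 3.10 (matching the base image).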