misikoff commited on
Commit
c83a125
β€’
1 Parent(s): 3bc2192

Revert "feat: try removing all non essential python and notebook files"

Browse files

This reverts commit 3bc2192856ffc2269eba3919150e072ddc22ed62.

checker.ipynb ADDED
@@ -0,0 +1,297 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 14,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "# # import json as pandas\n",
10
+ "# import pandas as pd\n",
11
+ "# # read the data\n",
12
+ "# x = pd.read_json(\"processed/sales/final5.jsonl\", lines=True)\n",
13
+ "# # x\n",
14
+ "# x[\"Region Type\"].unique()\n",
15
+ "# x[\"Home Type\"].unique()\n",
16
+ "# x[\"Bedroom Count\"].unique()"
17
+ ]
18
+ },
19
+ {
20
+ "cell_type": "code",
21
+ "execution_count": 2,
22
+ "metadata": {},
23
+ "outputs": [
24
+ {
25
+ "name": "stderr",
26
+ "output_type": "stream",
27
+ "text": [
28
+ "/Users/misikoff/opt/anaconda3/envs/sta663/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
29
+ " from .autonotebook import tqdm as notebook_tqdm\n"
30
+ ]
31
+ }
32
+ ],
33
+ "source": [
34
+ "from datasets import load_dataset\n",
35
+ "from os import path"
36
+ ]
37
+ },
38
+ {
39
+ "cell_type": "code",
40
+ "execution_count": 3,
41
+ "metadata": {},
42
+ "outputs": [
43
+ {
44
+ "name": "stdout",
45
+ "output_type": "stream",
46
+ "text": [
47
+ "days_on_market\n"
48
+ ]
49
+ },
50
+ {
51
+ "name": "stderr",
52
+ "output_type": "stream",
53
+ "text": [
54
+ "Downloading builder script: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 26.9k/26.9k [00:00<00:00, 1.15MB/s]\n",
55
+ "Downloading readme: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 24.0k/24.0k [00:00<00:00, 31.6MB/s]\n",
56
+ "Downloading data: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 229M/229M [00:06<00:00, 36.5MB/s] \n",
57
+ "Generating train split: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 586714/586714 [00:18<00:00, 31528.72 examples/s]\n",
58
+ "Creating parquet from Arrow format: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 587/587 [00:00<00:00, 1263.22ba/s]\n"
59
+ ]
60
+ },
61
+ {
62
+ "name": "stdout",
63
+ "output_type": "stream",
64
+ "text": [
65
+ "for_sale_listings\n"
66
+ ]
67
+ },
68
+ {
69
+ "name": "stderr",
70
+ "output_type": "stream",
71
+ "text": [
72
+ "Downloading builder script: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 26.9k/26.9k [00:00<00:00, 29.5MB/s]\n",
73
+ "Downloading readme: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 24.0k/24.0k [00:00<00:00, 28.6MB/s]\n",
74
+ "Downloading data: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 180M/180M [00:05<00:00, 35.9MB/s] \n",
75
+ "Generating train split: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 578653/578653 [00:18<00:00, 32029.85 examples/s]\n",
76
+ "Creating parquet from Arrow format: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 579/579 [00:00<00:00, 1328.81ba/s]\n"
77
+ ]
78
+ },
79
+ {
80
+ "name": "stdout",
81
+ "output_type": "stream",
82
+ "text": [
83
+ "home_values\n"
84
+ ]
85
+ },
86
+ {
87
+ "name": "stderr",
88
+ "output_type": "stream",
89
+ "text": [
90
+ "Downloading builder script: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 26.9k/26.9k [00:00<00:00, 25.7MB/s]\n",
91
+ "Downloading readme: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 24.0k/24.0k [00:00<00:00, 24.1MB/s]\n",
92
+ "Downloading data: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 41.1M/41.1M [00:01<00:00, 35.7MB/s]\n",
93
+ "Generating train split: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 117912/117912 [00:03<00:00, 34620.98 examples/s]\n",
94
+ "Creating parquet from Arrow format: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 118/118 [00:00<00:00, 1494.28ba/s]\n"
95
+ ]
96
+ },
97
+ {
98
+ "name": "stdout",
99
+ "output_type": "stream",
100
+ "text": [
101
+ "home_values_forecasts\n"
102
+ ]
103
+ },
104
+ {
105
+ "name": "stderr",
106
+ "output_type": "stream",
107
+ "text": [
108
+ "Downloading builder script: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 26.9k/26.9k [00:00<00:00, 6.64MB/s]\n",
109
+ "Downloading readme: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 24.0k/24.0k [00:00<00:00, 35.1MB/s]\n",
110
+ "Downloading data: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 14.1M/14.1M [00:00<00:00, 22.3MB/s]\n",
111
+ "Generating train split: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 31854/31854 [00:01<00:00, 27067.63 examples/s]\n",
112
+ "Creating parquet from Arrow format: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 32/32 [00:00<00:00, 814.79ba/s]\n"
113
+ ]
114
+ },
115
+ {
116
+ "name": "stdout",
117
+ "output_type": "stream",
118
+ "text": [
119
+ "new_construction\n"
120
+ ]
121
+ },
122
+ {
123
+ "name": "stderr",
124
+ "output_type": "stream",
125
+ "text": [
126
+ "Downloading builder script: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 26.9k/26.9k [00:00<00:00, 34.8MB/s]\n",
127
+ "Downloading readme: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 24.0k/24.0k [00:00<00:00, 26.8MB/s]\n",
128
+ "Downloading data: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 10.9M/10.9M [00:00<00:00, 27.7MB/s]\n",
129
+ "Generating train split: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 49487/49487 [00:01<00:00, 36723.35 examples/s]\n",
130
+ "Creating parquet from Arrow format: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 50/50 [00:00<00:00, 1286.17ba/s]\n"
131
+ ]
132
+ },
133
+ {
134
+ "name": "stdout",
135
+ "output_type": "stream",
136
+ "text": [
137
+ "rentals\n"
138
+ ]
139
+ },
140
+ {
141
+ "name": "stderr",
142
+ "output_type": "stream",
143
+ "text": [
144
+ "Downloading builder script: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 26.9k/26.9k [00:00<00:00, 8.60MB/s]\n",
145
+ "Downloading readme: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 24.0k/24.0k [00:00<00:00, 27.0MB/s]\n",
146
+ "Downloading data: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 447M/447M [00:11<00:00, 39.1MB/s] \n",
147
+ "Generating train split: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 1258740/1258740 [00:30<00:00, 40747.46 examples/s]\n",
148
+ "Creating parquet from Arrow format: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 1259/1259 [00:00<00:00, 1421.17ba/s]\n"
149
+ ]
150
+ },
151
+ {
152
+ "name": "stdout",
153
+ "output_type": "stream",
154
+ "text": [
155
+ "sales\n"
156
+ ]
157
+ },
158
+ {
159
+ "name": "stderr",
160
+ "output_type": "stream",
161
+ "text": [
162
+ "Downloading builder script: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 26.9k/26.9k [00:00<00:00, 26.0MB/s]\n",
163
+ "Downloading readme: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 24.0k/24.0k [00:00<00:00, 31.7MB/s]\n",
164
+ "Downloading data: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 139M/139M [00:03<00:00, 36.0MB/s] \n",
165
+ "Generating train split: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 255024/255024 [00:10<00:00, 24344.14 examples/s]\n",
166
+ "Creating parquet from Arrow format: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 256/256 [00:00<00:00, 925.38ba/s]\n"
167
+ ]
168
+ }
169
+ ],
170
+ "source": [
171
+ "dataset_dict = {}\n",
172
+ "\n",
173
+ "configs = [\n",
174
+ " \"days_on_market\",\n",
175
+ " \"for_sale_listings\",\n",
176
+ " \"home_values\",\n",
177
+ " \"home_values_forecasts\",\n",
178
+ " \"new_construction\",\n",
179
+ " \"rentals\",\n",
180
+ " \"sales\",\n",
181
+ "]\n",
182
+ "for config in configs:\n",
183
+ " print(config)\n",
184
+ " dataset_dict[config] = load_dataset(\n",
185
+ " \"misikoff/zillow\",\n",
186
+ " config,\n",
187
+ " trust_remote_code=True,\n",
188
+ " download_mode=\"force_redownload\",\n",
189
+ " cache_dir=\"./cache\",\n",
190
+ " )\n",
191
+ " filename = path.join(\"parquet_files\", config + \".parquet\")\n",
192
+ " dataset_dict[config][\"train\"].to_parquet(filename)"
193
+ ]
194
+ },
195
+ {
196
+ "cell_type": "code",
197
+ "execution_count": 18,
198
+ "metadata": {},
199
+ "outputs": [],
200
+ "source": [
201
+ "# import pyarrow as pa\n",
202
+ "\n",
203
+ "\n",
204
+ "# df = pd.read_feather(\n",
205
+ "# \"~/desktop/cache/misikoff___zillow/sales/1.1.0/c70d9545e9cef7612b795e19b5393a565f297e17856ab372df6f4026ecc498ae/zillow-train.arrow\"\n",
206
+ "# )\n",
207
+ "# df"
208
+ ]
209
+ },
210
+ {
211
+ "cell_type": "code",
212
+ "execution_count": 20,
213
+ "metadata": {},
214
+ "outputs": [
215
+ {
216
+ "name": "stderr",
217
+ "output_type": "stream",
218
+ "text": [
219
+ "Creating parquet from Arrow format: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 256/256 [00:00<00:00, 738.39ba/s]\n"
220
+ ]
221
+ },
222
+ {
223
+ "data": {
224
+ "text/plain": [
225
+ "27088039"
226
+ ]
227
+ },
228
+ "execution_count": 20,
229
+ "metadata": {},
230
+ "output_type": "execute_result"
231
+ }
232
+ ],
233
+ "source": [
234
+ "dataset_dict[config][\"train\"].to_parquet(\"test-sales.parquet\")"
235
+ ]
236
+ },
237
+ {
238
+ "cell_type": "code",
239
+ "execution_count": 32,
240
+ "metadata": {},
241
+ "outputs": [
242
+ {
243
+ "data": {
244
+ "text/plain": [
245
+ "{'Region ID': '102001',\n",
246
+ " 'Size Rank': 0,\n",
247
+ " 'Region': 'United States',\n",
248
+ " 'Region Type': 0,\n",
249
+ " 'State': None,\n",
250
+ " 'Home Type': 0,\n",
251
+ " 'Date': datetime.datetime(2008, 2, 2, 0, 0),\n",
252
+ " 'Mean Sale to List Ratio (Smoothed)': None,\n",
253
+ " 'Median Sale to List Ratio': None,\n",
254
+ " 'Median Sale Price': 172000.0,\n",
255
+ " 'Median Sale Price (Smoothed) (Seasonally Adjusted)': None,\n",
256
+ " 'Median Sale Price (Smoothed)': None,\n",
257
+ " 'Median Sale to List Ratio (Smoothed)': None,\n",
258
+ " '% Sold Below List': None,\n",
259
+ " '% Sold Below List (Smoothed)': None,\n",
260
+ " '% Sold Above List': None,\n",
261
+ " '% Sold Above List (Smoothed)': None,\n",
262
+ " 'Mean Sale to List Ratio': None}"
263
+ ]
264
+ },
265
+ "execution_count": 32,
266
+ "metadata": {},
267
+ "output_type": "execute_result"
268
+ }
269
+ ],
270
+ "source": [
271
+ "gen = iter(dataset_dict[config][\"train\"])\n",
272
+ "next(gen)"
273
+ ]
274
+ }
275
+ ],
276
+ "metadata": {
277
+ "kernelspec": {
278
+ "display_name": "sta663",
279
+ "language": "python",
280
+ "name": "python3"
281
+ },
282
+ "language_info": {
283
+ "codemirror_mode": {
284
+ "name": "ipython",
285
+ "version": 3
286
+ },
287
+ "file_extension": ".py",
288
+ "mimetype": "text/x-python",
289
+ "name": "python",
290
+ "nbconvert_exporter": "python",
291
+ "pygments_lexer": "ipython3",
292
+ "version": "3.12.2"
293
+ }
294
+ },
295
+ "nbformat": 4,
296
+ "nbformat_minor": 2
297
+ }
processors/README.md ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+
2
+ # Processors
3
+
4
+ These processors build the processed files found in the `processed` directory. They are used to ingest the raw data and prepare it for analysis.
processors/days_on_market.ipynb ADDED
@@ -0,0 +1,783 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 4,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import pandas as pd\n",
10
+ "import os\n",
11
+ "\n",
12
+ "from helpers import (\n",
13
+ " get_data_path_for_config,\n",
14
+ " get_combined_df,\n",
15
+ " save_final_df_as_jsonl,\n",
16
+ " handle_slug_column_mappings,\n",
17
+ " set_home_type,\n",
18
+ ")"
19
+ ]
20
+ },
21
+ {
22
+ "cell_type": "code",
23
+ "execution_count": 5,
24
+ "metadata": {},
25
+ "outputs": [],
26
+ "source": [
27
+ "CONFIG_NAME = \"days_on_market\""
28
+ ]
29
+ },
30
+ {
31
+ "cell_type": "code",
32
+ "execution_count": 6,
33
+ "metadata": {},
34
+ "outputs": [
35
+ {
36
+ "name": "stdout",
37
+ "output_type": "stream",
38
+ "text": [
39
+ "processing Metro_med_listings_price_cut_amt_uc_sfr_month.csv\n",
40
+ "processing Metro_perc_listings_price_cut_uc_sfr_week.csv\n",
41
+ "processing Metro_med_listings_price_cut_amt_uc_sfrcondo_month.csv\n",
42
+ "processing Metro_med_listings_price_cut_amt_uc_sfr_week.csv\n",
43
+ "processing Metro_med_doz_pending_uc_sfrcondo_month.csv\n",
44
+ "processing Metro_mean_listings_price_cut_amt_uc_sfr_sm_month.csv\n",
45
+ "processing Metro_med_listings_price_cut_perc_uc_sfrcondo_sm_month.csv\n",
46
+ "processing Metro_mean_days_to_close_uc_sfrcondo_week.csv\n",
47
+ "processing Metro_mean_days_to_close_uc_sfrcondo_month.csv\n",
48
+ "processing Metro_mean_listings_price_cut_amt_uc_sfrcondo_sm_month.csv\n",
49
+ "processing Metro_med_listings_price_cut_perc_uc_sfr_week.csv\n",
50
+ "processing Metro_median_days_to_close_uc_sfrcondo_sm_week.csv\n",
51
+ "processing Metro_med_listings_price_cut_perc_uc_sfr_sm_week.csv\n",
52
+ "processing Metro_mean_listings_price_cut_perc_uc_sfrcondo_sm_week.csv\n",
53
+ "processing Metro_perc_listings_price_cut_uc_sfrcondo_week.csv\n",
54
+ "processing Metro_med_doz_pending_uc_sfrcondo_sm_month.csv\n",
55
+ "processing Metro_mean_days_to_close_uc_sfrcondo_sm_week.csv\n",
56
+ "processing Metro_med_listings_price_cut_perc_uc_sfrcondo_week.csv\n",
57
+ "processing Metro_mean_listings_price_cut_amt_uc_sfr_week.csv\n",
58
+ "processing Metro_med_listings_price_cut_perc_uc_sfrcondo_month.csv\n",
59
+ "processing Metro_mean_doz_pending_uc_sfrcondo_week.csv\n",
60
+ "processing Metro_mean_listings_price_cut_amt_uc_sfrcondo_week.csv\n",
61
+ "processing Metro_median_days_to_close_uc_sfrcondo_week.csv\n",
62
+ "processing Metro_med_listings_price_cut_amt_uc_sfr_sm_month.csv\n",
63
+ "processing Metro_mean_doz_pending_uc_sfrcondo_sm_month.csv\n",
64
+ "processing Metro_med_listings_price_cut_perc_uc_sfr_sm_month.csv\n",
65
+ "processing Metro_perc_listings_price_cut_uc_sfrcondo_sm_week.csv\n",
66
+ "processing Metro_median_days_to_close_uc_sfrcondo_sm_month.csv\n",
67
+ "processing Metro_med_listings_price_cut_perc_uc_sfr_month.csv\n",
68
+ "processing Metro_mean_listings_price_cut_perc_uc_sfrcondo_week.csv\n",
69
+ "processing Metro_med_listings_price_cut_amt_uc_sfrcondo_week.csv\n",
70
+ "processing Metro_med_listings_price_cut_amt_uc_sfrcondo_sm_week.csv\n",
71
+ "processing Metro_mean_days_to_close_uc_sfrcondo_sm_month.csv\n",
72
+ "processing Metro_med_listings_price_cut_amt_uc_sfr_sm_week.csv\n",
73
+ "processing Metro_mean_doz_pending_uc_sfrcondo_sm_week.csv\n",
74
+ "processing Metro_mean_listings_price_cut_amt_uc_sfrcondo_sm_week.csv\n",
75
+ "processing Metro_mean_listings_price_cut_amt_uc_sfr_sm_week.csv\n",
76
+ "processing Metro_perc_listings_price_cut_uc_sfrcondo_sm_month.csv\n",
77
+ "processing Metro_mean_listings_price_cut_amt_uc_sfrcondo_month.csv\n",
78
+ "processing Metro_med_listings_price_cut_amt_uc_sfrcondo_sm_month.csv\n",
79
+ "processing Metro_med_doz_pending_uc_sfrcondo_sm_week.csv\n",
80
+ "processing Metro_med_listings_price_cut_perc_uc_sfrcondo_sm_week.csv\n",
81
+ "processing Metro_perc_listings_price_cut_uc_sfr_month.csv\n",
82
+ "processing Metro_med_doz_pending_uc_sfrcondo_week.csv\n",
83
+ "processing Metro_mean_listings_price_cut_perc_uc_sfrcondo_sm_month.csv\n",
84
+ "processing Metro_perc_listings_price_cut_uc_sfr_sm_month.csv\n",
85
+ "processing Metro_median_days_to_close_uc_sfrcondo_month.csv\n",
86
+ "processing Metro_perc_listings_price_cut_uc_sfr_sm_week.csv\n",
87
+ "processing Metro_mean_listings_price_cut_perc_uc_sfrcondo_month.csv\n",
88
+ "processing Metro_mean_listings_price_cut_amt_uc_sfr_month.csv\n",
89
+ "processing Metro_mean_doz_pending_uc_sfrcondo_month.csv\n"
90
+ ]
91
+ },
92
+ {
93
+ "data": {
94
+ "text/html": [
95
+ "<div>\n",
96
+ "<style scoped>\n",
97
+ " .dataframe tbody tr th:only-of-type {\n",
98
+ " vertical-align: middle;\n",
99
+ " }\n",
100
+ "\n",
101
+ " .dataframe tbody tr th {\n",
102
+ " vertical-align: top;\n",
103
+ " }\n",
104
+ "\n",
105
+ " .dataframe thead th {\n",
106
+ " text-align: right;\n",
107
+ " }\n",
108
+ "</style>\n",
109
+ "<table border=\"1\" class=\"dataframe\">\n",
110
+ " <thead>\n",
111
+ " <tr style=\"text-align: right;\">\n",
112
+ " <th></th>\n",
113
+ " <th>RegionID</th>\n",
114
+ " <th>SizeRank</th>\n",
115
+ " <th>RegionName</th>\n",
116
+ " <th>RegionType</th>\n",
117
+ " <th>StateName</th>\n",
118
+ " <th>Home Type</th>\n",
119
+ " <th>Date</th>\n",
120
+ " <th>Percent Listings Price Cut</th>\n",
121
+ " <th>Mean Listings Price Cut Amount</th>\n",
122
+ " <th>Percent Listings Price Cut (Smoothed)</th>\n",
123
+ " <th>Mean Listings Price Cut Amount (Smoothed)</th>\n",
124
+ " <th>Median Days on Pending (Smoothed)</th>\n",
125
+ " <th>Median Days on Pending</th>\n",
126
+ " </tr>\n",
127
+ " </thead>\n",
128
+ " <tbody>\n",
129
+ " <tr>\n",
130
+ " <th>0</th>\n",
131
+ " <td>102001</td>\n",
132
+ " <td>0</td>\n",
133
+ " <td>United States</td>\n",
134
+ " <td>country</td>\n",
135
+ " <td>NaN</td>\n",
136
+ " <td>SFR</td>\n",
137
+ " <td>2018-01-06</td>\n",
138
+ " <td>NaN</td>\n",
139
+ " <td>13508.368375</td>\n",
140
+ " <td>NaN</td>\n",
141
+ " <td>NaN</td>\n",
142
+ " <td>NaN</td>\n",
143
+ " <td>NaN</td>\n",
144
+ " </tr>\n",
145
+ " <tr>\n",
146
+ " <th>1</th>\n",
147
+ " <td>102001</td>\n",
148
+ " <td>0</td>\n",
149
+ " <td>United States</td>\n",
150
+ " <td>country</td>\n",
151
+ " <td>NaN</td>\n",
152
+ " <td>SFR</td>\n",
153
+ " <td>2018-01-13</td>\n",
154
+ " <td>0.049042</td>\n",
155
+ " <td>14114.788383</td>\n",
156
+ " <td>NaN</td>\n",
157
+ " <td>NaN</td>\n",
158
+ " <td>NaN</td>\n",
159
+ " <td>NaN</td>\n",
160
+ " </tr>\n",
161
+ " <tr>\n",
162
+ " <th>2</th>\n",
163
+ " <td>102001</td>\n",
164
+ " <td>0</td>\n",
165
+ " <td>United States</td>\n",
166
+ " <td>country</td>\n",
167
+ " <td>NaN</td>\n",
168
+ " <td>SFR</td>\n",
169
+ " <td>2018-01-20</td>\n",
170
+ " <td>0.044740</td>\n",
171
+ " <td>14326.128956</td>\n",
172
+ " <td>NaN</td>\n",
173
+ " <td>NaN</td>\n",
174
+ " <td>NaN</td>\n",
175
+ " <td>NaN</td>\n",
176
+ " </tr>\n",
177
+ " <tr>\n",
178
+ " <th>3</th>\n",
179
+ " <td>102001</td>\n",
180
+ " <td>0</td>\n",
181
+ " <td>United States</td>\n",
182
+ " <td>country</td>\n",
183
+ " <td>NaN</td>\n",
184
+ " <td>SFR</td>\n",
185
+ " <td>2018-01-27</td>\n",
186
+ " <td>0.047930</td>\n",
187
+ " <td>13998.585612</td>\n",
188
+ " <td>NaN</td>\n",
189
+ " <td>13998.585612</td>\n",
190
+ " <td>NaN</td>\n",
191
+ " <td>NaN</td>\n",
192
+ " </tr>\n",
193
+ " <tr>\n",
194
+ " <th>4</th>\n",
195
+ " <td>102001</td>\n",
196
+ " <td>0</td>\n",
197
+ " <td>United States</td>\n",
198
+ " <td>country</td>\n",
199
+ " <td>NaN</td>\n",
200
+ " <td>SFR</td>\n",
201
+ " <td>2018-02-03</td>\n",
202
+ " <td>0.047622</td>\n",
203
+ " <td>14120.035549</td>\n",
204
+ " <td>0.047622</td>\n",
205
+ " <td>14120.035549</td>\n",
206
+ " <td>NaN</td>\n",
207
+ " <td>NaN</td>\n",
208
+ " </tr>\n",
209
+ " <tr>\n",
210
+ " <th>...</th>\n",
211
+ " <td>...</td>\n",
212
+ " <td>...</td>\n",
213
+ " <td>...</td>\n",
214
+ " <td>...</td>\n",
215
+ " <td>...</td>\n",
216
+ " <td>...</td>\n",
217
+ " <td>...</td>\n",
218
+ " <td>...</td>\n",
219
+ " <td>...</td>\n",
220
+ " <td>...</td>\n",
221
+ " <td>...</td>\n",
222
+ " <td>...</td>\n",
223
+ " <td>...</td>\n",
224
+ " </tr>\n",
225
+ " <tr>\n",
226
+ " <th>586709</th>\n",
227
+ " <td>845172</td>\n",
228
+ " <td>769</td>\n",
229
+ " <td>Winfield, KS</td>\n",
230
+ " <td>msa</td>\n",
231
+ " <td>KS</td>\n",
232
+ " <td>all homes</td>\n",
233
+ " <td>2024-01-06</td>\n",
234
+ " <td>0.094017</td>\n",
235
+ " <td>NaN</td>\n",
236
+ " <td>0.037378</td>\n",
237
+ " <td>NaN</td>\n",
238
+ " <td>NaN</td>\n",
239
+ " <td>NaN</td>\n",
240
+ " </tr>\n",
241
+ " <tr>\n",
242
+ " <th>586710</th>\n",
243
+ " <td>845172</td>\n",
244
+ " <td>769</td>\n",
245
+ " <td>Winfield, KS</td>\n",
246
+ " <td>msa</td>\n",
247
+ " <td>KS</td>\n",
248
+ " <td>all homes</td>\n",
249
+ " <td>2024-01-13</td>\n",
250
+ " <td>0.070175</td>\n",
251
+ " <td>NaN</td>\n",
252
+ " <td>0.043203</td>\n",
253
+ " <td>NaN</td>\n",
254
+ " <td>NaN</td>\n",
255
+ " <td>NaN</td>\n",
256
+ " </tr>\n",
257
+ " <tr>\n",
258
+ " <th>586711</th>\n",
259
+ " <td>845172</td>\n",
260
+ " <td>769</td>\n",
261
+ " <td>Winfield, KS</td>\n",
262
+ " <td>msa</td>\n",
263
+ " <td>KS</td>\n",
264
+ " <td>all homes</td>\n",
265
+ " <td>2024-01-20</td>\n",
266
+ " <td>0.043478</td>\n",
267
+ " <td>NaN</td>\n",
268
+ " <td>0.054073</td>\n",
269
+ " <td>NaN</td>\n",
270
+ " <td>NaN</td>\n",
271
+ " <td>NaN</td>\n",
272
+ " </tr>\n",
273
+ " <tr>\n",
274
+ " <th>586712</th>\n",
275
+ " <td>845172</td>\n",
276
+ " <td>769</td>\n",
277
+ " <td>Winfield, KS</td>\n",
278
+ " <td>msa</td>\n",
279
+ " <td>KS</td>\n",
280
+ " <td>all homes</td>\n",
281
+ " <td>2024-01-27</td>\n",
282
+ " <td>0.036697</td>\n",
283
+ " <td>NaN</td>\n",
284
+ " <td>0.061092</td>\n",
285
+ " <td>NaN</td>\n",
286
+ " <td>NaN</td>\n",
287
+ " <td>NaN</td>\n",
288
+ " </tr>\n",
289
+ " <tr>\n",
290
+ " <th>586713</th>\n",
291
+ " <td>845172</td>\n",
292
+ " <td>769</td>\n",
293
+ " <td>Winfield, KS</td>\n",
294
+ " <td>msa</td>\n",
295
+ " <td>KS</td>\n",
296
+ " <td>all homes</td>\n",
297
+ " <td>2024-02-03</td>\n",
298
+ " <td>0.077670</td>\n",
299
+ " <td>NaN</td>\n",
300
+ " <td>0.057005</td>\n",
301
+ " <td>NaN</td>\n",
302
+ " <td>NaN</td>\n",
303
+ " <td>NaN</td>\n",
304
+ " </tr>\n",
305
+ " </tbody>\n",
306
+ "</table>\n",
307
+ "<p>586714 rows Γ— 13 columns</p>\n",
308
+ "</div>"
309
+ ],
310
+ "text/plain": [
311
+ " RegionID SizeRank RegionName RegionType StateName Home Type \\\n",
312
+ "0 102001 0 United States country NaN SFR \n",
313
+ "1 102001 0 United States country NaN SFR \n",
314
+ "2 102001 0 United States country NaN SFR \n",
315
+ "3 102001 0 United States country NaN SFR \n",
316
+ "4 102001 0 United States country NaN SFR \n",
317
+ "... ... ... ... ... ... ... \n",
318
+ "586709 845172 769 Winfield, KS msa KS all homes \n",
319
+ "586710 845172 769 Winfield, KS msa KS all homes \n",
320
+ "586711 845172 769 Winfield, KS msa KS all homes \n",
321
+ "586712 845172 769 Winfield, KS msa KS all homes \n",
322
+ "586713 845172 769 Winfield, KS msa KS all homes \n",
323
+ "\n",
324
+ " Date Percent Listings Price Cut \\\n",
325
+ "0 2018-01-06 NaN \n",
326
+ "1 2018-01-13 0.049042 \n",
327
+ "2 2018-01-20 0.044740 \n",
328
+ "3 2018-01-27 0.047930 \n",
329
+ "4 2018-02-03 0.047622 \n",
330
+ "... ... ... \n",
331
+ "586709 2024-01-06 0.094017 \n",
332
+ "586710 2024-01-13 0.070175 \n",
333
+ "586711 2024-01-20 0.043478 \n",
334
+ "586712 2024-01-27 0.036697 \n",
335
+ "586713 2024-02-03 0.077670 \n",
336
+ "\n",
337
+ " Mean Listings Price Cut Amount Percent Listings Price Cut (Smoothed) \\\n",
338
+ "0 13508.368375 NaN \n",
339
+ "1 14114.788383 NaN \n",
340
+ "2 14326.128956 NaN \n",
341
+ "3 13998.585612 NaN \n",
342
+ "4 14120.035549 0.047622 \n",
343
+ "... ... ... \n",
344
+ "586709 NaN 0.037378 \n",
345
+ "586710 NaN 0.043203 \n",
346
+ "586711 NaN 0.054073 \n",
347
+ "586712 NaN 0.061092 \n",
348
+ "586713 NaN 0.057005 \n",
349
+ "\n",
350
+ " Mean Listings Price Cut Amount (Smoothed) \\\n",
351
+ "0 NaN \n",
352
+ "1 NaN \n",
353
+ "2 NaN \n",
354
+ "3 13998.585612 \n",
355
+ "4 14120.035549 \n",
356
+ "... ... \n",
357
+ "586709 NaN \n",
358
+ "586710 NaN \n",
359
+ "586711 NaN \n",
360
+ "586712 NaN \n",
361
+ "586713 NaN \n",
362
+ "\n",
363
+ " Median Days on Pending (Smoothed) Median Days on Pending \n",
364
+ "0 NaN NaN \n",
365
+ "1 NaN NaN \n",
366
+ "2 NaN NaN \n",
367
+ "3 NaN NaN \n",
368
+ "4 NaN NaN \n",
369
+ "... ... ... \n",
370
+ "586709 NaN NaN \n",
371
+ "586710 NaN NaN \n",
372
+ "586711 NaN NaN \n",
373
+ "586712 NaN NaN \n",
374
+ "586713 NaN NaN \n",
375
+ "\n",
376
+ "[586714 rows x 13 columns]"
377
+ ]
378
+ },
379
+ "execution_count": 6,
380
+ "metadata": {},
381
+ "output_type": "execute_result"
382
+ }
383
+ ],
384
+ "source": [
385
+ "data_frames = []\n",
386
+ "\n",
387
+ "exclude_columns = [\n",
388
+ " \"RegionID\",\n",
389
+ " \"SizeRank\",\n",
390
+ " \"RegionName\",\n",
391
+ " \"RegionType\",\n",
392
+ " \"StateName\",\n",
393
+ " \"Home Type\",\n",
394
+ "]\n",
395
+ "\n",
396
+ "slug_column_mappings = {\n",
397
+ " \"_mean_listings_price_cut_amt_\": \"Mean Listings Price Cut Amount\",\n",
398
+ " \"_med_doz_pending_\": \"Median Days on Pending\",\n",
399
+ " \"_median_days_to_pending_\": \"Median Days to Close\",\n",
400
+ " \"_perc_listings_price_cut_\": \"Percent Listings Price Cut\",\n",
401
+ "}\n",
402
+ "\n",
403
+ "data_dir_path = get_data_path_for_config(CONFIG_NAME)\n",
404
+ "\n",
405
+ "for filename in os.listdir(data_dir_path):\n",
406
+ " if filename.endswith(\".csv\"):\n",
407
+ " print(\"processing \" + filename)\n",
408
+ " # skip month files for now since they are redundant\n",
409
+ " if \"month\" in filename:\n",
410
+ " continue\n",
411
+ "\n",
412
+ " cur_df = pd.read_csv(os.path.join(data_dir_path, filename))\n",
413
+ "\n",
414
+ " cur_df[\"RegionName\"] = cur_df[\"RegionName\"].astype(str)\n",
415
+ " cur_df = set_home_type(cur_df, filename)\n",
416
+ "\n",
417
+ " data_frames = handle_slug_column_mappings(\n",
418
+ " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
419
+ " )\n",
420
+ "\n",
421
+ "\n",
422
+ "combined_df = get_combined_df(\n",
423
+ " data_frames,\n",
424
+ " [\n",
425
+ " \"RegionID\",\n",
426
+ " \"SizeRank\",\n",
427
+ " \"RegionName\",\n",
428
+ " \"RegionType\",\n",
429
+ " \"StateName\",\n",
430
+ " \"Home Type\",\n",
431
+ " \"Date\",\n",
432
+ " ],\n",
433
+ ")\n",
434
+ "\n",
435
+ "combined_df"
436
+ ]
437
+ },
438
+ {
439
+ "cell_type": "code",
440
+ "execution_count": 7,
441
+ "metadata": {},
442
+ "outputs": [
443
+ {
444
+ "data": {
445
+ "text/html": [
446
+ "<div>\n",
447
+ "<style scoped>\n",
448
+ " .dataframe tbody tr th:only-of-type {\n",
449
+ " vertical-align: middle;\n",
450
+ " }\n",
451
+ "\n",
452
+ " .dataframe tbody tr th {\n",
453
+ " vertical-align: top;\n",
454
+ " }\n",
455
+ "\n",
456
+ " .dataframe thead th {\n",
457
+ " text-align: right;\n",
458
+ " }\n",
459
+ "</style>\n",
460
+ "<table border=\"1\" class=\"dataframe\">\n",
461
+ " <thead>\n",
462
+ " <tr style=\"text-align: right;\">\n",
463
+ " <th></th>\n",
464
+ " <th>Region ID</th>\n",
465
+ " <th>Size Rank</th>\n",
466
+ " <th>Region</th>\n",
467
+ " <th>Region Type</th>\n",
468
+ " <th>State</th>\n",
469
+ " <th>Home Type</th>\n",
470
+ " <th>Date</th>\n",
471
+ " <th>Percent Listings Price Cut</th>\n",
472
+ " <th>Mean Listings Price Cut Amount</th>\n",
473
+ " <th>Percent Listings Price Cut (Smoothed)</th>\n",
474
+ " <th>Mean Listings Price Cut Amount (Smoothed)</th>\n",
475
+ " <th>Median Days on Pending (Smoothed)</th>\n",
476
+ " <th>Median Days on Pending</th>\n",
477
+ " </tr>\n",
478
+ " </thead>\n",
479
+ " <tbody>\n",
480
+ " <tr>\n",
481
+ " <th>0</th>\n",
482
+ " <td>102001</td>\n",
483
+ " <td>0</td>\n",
484
+ " <td>United States</td>\n",
485
+ " <td>country</td>\n",
486
+ " <td>NaN</td>\n",
487
+ " <td>SFR</td>\n",
488
+ " <td>2018-01-06</td>\n",
489
+ " <td>NaN</td>\n",
490
+ " <td>13508.368375</td>\n",
491
+ " <td>NaN</td>\n",
492
+ " <td>NaN</td>\n",
493
+ " <td>NaN</td>\n",
494
+ " <td>NaN</td>\n",
495
+ " </tr>\n",
496
+ " <tr>\n",
497
+ " <th>1</th>\n",
498
+ " <td>102001</td>\n",
499
+ " <td>0</td>\n",
500
+ " <td>United States</td>\n",
501
+ " <td>country</td>\n",
502
+ " <td>NaN</td>\n",
503
+ " <td>SFR</td>\n",
504
+ " <td>2018-01-13</td>\n",
505
+ " <td>0.049042</td>\n",
506
+ " <td>14114.788383</td>\n",
507
+ " <td>NaN</td>\n",
508
+ " <td>NaN</td>\n",
509
+ " <td>NaN</td>\n",
510
+ " <td>NaN</td>\n",
511
+ " </tr>\n",
512
+ " <tr>\n",
513
+ " <th>2</th>\n",
514
+ " <td>102001</td>\n",
515
+ " <td>0</td>\n",
516
+ " <td>United States</td>\n",
517
+ " <td>country</td>\n",
518
+ " <td>NaN</td>\n",
519
+ " <td>SFR</td>\n",
520
+ " <td>2018-01-20</td>\n",
521
+ " <td>0.044740</td>\n",
522
+ " <td>14326.128956</td>\n",
523
+ " <td>NaN</td>\n",
524
+ " <td>NaN</td>\n",
525
+ " <td>NaN</td>\n",
526
+ " <td>NaN</td>\n",
527
+ " </tr>\n",
528
+ " <tr>\n",
529
+ " <th>3</th>\n",
530
+ " <td>102001</td>\n",
531
+ " <td>0</td>\n",
532
+ " <td>United States</td>\n",
533
+ " <td>country</td>\n",
534
+ " <td>NaN</td>\n",
535
+ " <td>SFR</td>\n",
536
+ " <td>2018-01-27</td>\n",
537
+ " <td>0.047930</td>\n",
538
+ " <td>13998.585612</td>\n",
539
+ " <td>NaN</td>\n",
540
+ " <td>13998.585612</td>\n",
541
+ " <td>NaN</td>\n",
542
+ " <td>NaN</td>\n",
543
+ " </tr>\n",
544
+ " <tr>\n",
545
+ " <th>4</th>\n",
546
+ " <td>102001</td>\n",
547
+ " <td>0</td>\n",
548
+ " <td>United States</td>\n",
549
+ " <td>country</td>\n",
550
+ " <td>NaN</td>\n",
551
+ " <td>SFR</td>\n",
552
+ " <td>2018-02-03</td>\n",
553
+ " <td>0.047622</td>\n",
554
+ " <td>14120.035549</td>\n",
555
+ " <td>0.047622</td>\n",
556
+ " <td>14120.035549</td>\n",
557
+ " <td>NaN</td>\n",
558
+ " <td>NaN</td>\n",
559
+ " </tr>\n",
560
+ " <tr>\n",
561
+ " <th>...</th>\n",
562
+ " <td>...</td>\n",
563
+ " <td>...</td>\n",
564
+ " <td>...</td>\n",
565
+ " <td>...</td>\n",
566
+ " <td>...</td>\n",
567
+ " <td>...</td>\n",
568
+ " <td>...</td>\n",
569
+ " <td>...</td>\n",
570
+ " <td>...</td>\n",
571
+ " <td>...</td>\n",
572
+ " <td>...</td>\n",
573
+ " <td>...</td>\n",
574
+ " <td>...</td>\n",
575
+ " </tr>\n",
576
+ " <tr>\n",
577
+ " <th>586709</th>\n",
578
+ " <td>845172</td>\n",
579
+ " <td>769</td>\n",
580
+ " <td>Winfield, KS</td>\n",
581
+ " <td>msa</td>\n",
582
+ " <td>KS</td>\n",
583
+ " <td>all homes</td>\n",
584
+ " <td>2024-01-06</td>\n",
585
+ " <td>0.094017</td>\n",
586
+ " <td>NaN</td>\n",
587
+ " <td>0.037378</td>\n",
588
+ " <td>NaN</td>\n",
589
+ " <td>NaN</td>\n",
590
+ " <td>NaN</td>\n",
591
+ " </tr>\n",
592
+ " <tr>\n",
593
+ " <th>586710</th>\n",
594
+ " <td>845172</td>\n",
595
+ " <td>769</td>\n",
596
+ " <td>Winfield, KS</td>\n",
597
+ " <td>msa</td>\n",
598
+ " <td>KS</td>\n",
599
+ " <td>all homes</td>\n",
600
+ " <td>2024-01-13</td>\n",
601
+ " <td>0.070175</td>\n",
602
+ " <td>NaN</td>\n",
603
+ " <td>0.043203</td>\n",
604
+ " <td>NaN</td>\n",
605
+ " <td>NaN</td>\n",
606
+ " <td>NaN</td>\n",
607
+ " </tr>\n",
608
+ " <tr>\n",
609
+ " <th>586711</th>\n",
610
+ " <td>845172</td>\n",
611
+ " <td>769</td>\n",
612
+ " <td>Winfield, KS</td>\n",
613
+ " <td>msa</td>\n",
614
+ " <td>KS</td>\n",
615
+ " <td>all homes</td>\n",
616
+ " <td>2024-01-20</td>\n",
617
+ " <td>0.043478</td>\n",
618
+ " <td>NaN</td>\n",
619
+ " <td>0.054073</td>\n",
620
+ " <td>NaN</td>\n",
621
+ " <td>NaN</td>\n",
622
+ " <td>NaN</td>\n",
623
+ " </tr>\n",
624
+ " <tr>\n",
625
+ " <th>586712</th>\n",
626
+ " <td>845172</td>\n",
627
+ " <td>769</td>\n",
628
+ " <td>Winfield, KS</td>\n",
629
+ " <td>msa</td>\n",
630
+ " <td>KS</td>\n",
631
+ " <td>all homes</td>\n",
632
+ " <td>2024-01-27</td>\n",
633
+ " <td>0.036697</td>\n",
634
+ " <td>NaN</td>\n",
635
+ " <td>0.061092</td>\n",
636
+ " <td>NaN</td>\n",
637
+ " <td>NaN</td>\n",
638
+ " <td>NaN</td>\n",
639
+ " </tr>\n",
640
+ " <tr>\n",
641
+ " <th>586713</th>\n",
642
+ " <td>845172</td>\n",
643
+ " <td>769</td>\n",
644
+ " <td>Winfield, KS</td>\n",
645
+ " <td>msa</td>\n",
646
+ " <td>KS</td>\n",
647
+ " <td>all homes</td>\n",
648
+ " <td>2024-02-03</td>\n",
649
+ " <td>0.077670</td>\n",
650
+ " <td>NaN</td>\n",
651
+ " <td>0.057005</td>\n",
652
+ " <td>NaN</td>\n",
653
+ " <td>NaN</td>\n",
654
+ " <td>NaN</td>\n",
655
+ " </tr>\n",
656
+ " </tbody>\n",
657
+ "</table>\n",
658
+ "<p>586714 rows Γ— 13 columns</p>\n",
659
+ "</div>"
660
+ ],
661
+ "text/plain": [
662
+ " Region ID Size Rank Region Region Type State Home Type \\\n",
663
+ "0 102001 0 United States country NaN SFR \n",
664
+ "1 102001 0 United States country NaN SFR \n",
665
+ "2 102001 0 United States country NaN SFR \n",
666
+ "3 102001 0 United States country NaN SFR \n",
667
+ "4 102001 0 United States country NaN SFR \n",
668
+ "... ... ... ... ... ... ... \n",
669
+ "586709 845172 769 Winfield, KS msa KS all homes \n",
670
+ "586710 845172 769 Winfield, KS msa KS all homes \n",
671
+ "586711 845172 769 Winfield, KS msa KS all homes \n",
672
+ "586712 845172 769 Winfield, KS msa KS all homes \n",
673
+ "586713 845172 769 Winfield, KS msa KS all homes \n",
674
+ "\n",
675
+ " Date Percent Listings Price Cut Mean Listings Price Cut Amount \\\n",
676
+ "0 2018-01-06 NaN 13508.368375 \n",
677
+ "1 2018-01-13 0.049042 14114.788383 \n",
678
+ "2 2018-01-20 0.044740 14326.128956 \n",
679
+ "3 2018-01-27 0.047930 13998.585612 \n",
680
+ "4 2018-02-03 0.047622 14120.035549 \n",
681
+ "... ... ... ... \n",
682
+ "586709 2024-01-06 0.094017 NaN \n",
683
+ "586710 2024-01-13 0.070175 NaN \n",
684
+ "586711 2024-01-20 0.043478 NaN \n",
685
+ "586712 2024-01-27 0.036697 NaN \n",
686
+ "586713 2024-02-03 0.077670 NaN \n",
687
+ "\n",
688
+ " Percent Listings Price Cut (Smoothed) \\\n",
689
+ "0 NaN \n",
690
+ "1 NaN \n",
691
+ "2 NaN \n",
692
+ "3 NaN \n",
693
+ "4 0.047622 \n",
694
+ "... ... \n",
695
+ "586709 0.037378 \n",
696
+ "586710 0.043203 \n",
697
+ "586711 0.054073 \n",
698
+ "586712 0.061092 \n",
699
+ "586713 0.057005 \n",
700
+ "\n",
701
+ " Mean Listings Price Cut Amount (Smoothed) \\\n",
702
+ "0 NaN \n",
703
+ "1 NaN \n",
704
+ "2 NaN \n",
705
+ "3 13998.585612 \n",
706
+ "4 14120.035549 \n",
707
+ "... ... \n",
708
+ "586709 NaN \n",
709
+ "586710 NaN \n",
710
+ "586711 NaN \n",
711
+ "586712 NaN \n",
712
+ "586713 NaN \n",
713
+ "\n",
714
+ " Median Days on Pending (Smoothed) Median Days on Pending \n",
715
+ "0 NaN NaN \n",
716
+ "1 NaN NaN \n",
717
+ "2 NaN NaN \n",
718
+ "3 NaN NaN \n",
719
+ "4 NaN NaN \n",
720
+ "... ... ... \n",
721
+ "586709 NaN NaN \n",
722
+ "586710 NaN NaN \n",
723
+ "586711 NaN NaN \n",
724
+ "586712 NaN NaN \n",
725
+ "586713 NaN NaN \n",
726
+ "\n",
727
+ "[586714 rows x 13 columns]"
728
+ ]
729
+ },
730
+ "execution_count": 7,
731
+ "metadata": {},
732
+ "output_type": "execute_result"
733
+ }
734
+ ],
735
+ "source": [
736
+ "# Adjust column names\n",
737
+ "final_df = combined_df.rename(\n",
738
+ " columns={\n",
739
+ " \"RegionID\": \"Region ID\",\n",
740
+ " \"SizeRank\": \"Size Rank\",\n",
741
+ " \"RegionName\": \"Region\",\n",
742
+ " \"RegionType\": \"Region Type\",\n",
743
+ " \"StateName\": \"State\",\n",
744
+ " }\n",
745
+ ")\n",
746
+ "\n",
747
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
748
+ "\n",
749
+ "final_df"
750
+ ]
751
+ },
752
+ {
753
+ "cell_type": "code",
754
+ "execution_count": 8,
755
+ "metadata": {},
756
+ "outputs": [],
757
+ "source": [
758
+ "save_final_df_as_jsonl(CONFIG_NAME, final_df)"
759
+ ]
760
+ }
761
+ ],
762
+ "metadata": {
763
+ "kernelspec": {
764
+ "display_name": "Python 3",
765
+ "language": "python",
766
+ "name": "python3"
767
+ },
768
+ "language_info": {
769
+ "codemirror_mode": {
770
+ "name": "ipython",
771
+ "version": 3
772
+ },
773
+ "file_extension": ".py",
774
+ "mimetype": "text/x-python",
775
+ "name": "python",
776
+ "nbconvert_exporter": "python",
777
+ "pygments_lexer": "ipython3",
778
+ "version": "3.12.2"
779
+ }
780
+ },
781
+ "nbformat": 4,
782
+ "nbformat_minor": 2
783
+ }
processors/days_on_market.py ADDED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[4]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import (
11
+ get_data_path_for_config,
12
+ get_combined_df,
13
+ save_final_df_as_jsonl,
14
+ handle_slug_column_mappings,
15
+ set_home_type,
16
+ )
17
+
18
+
19
+ # In[5]:
20
+
21
+
22
+ CONFIG_NAME = "days_on_market"
23
+
24
+
25
+ # In[6]:
26
+
27
+
28
+ data_frames = []
29
+
30
+ exclude_columns = [
31
+ "RegionID",
32
+ "SizeRank",
33
+ "RegionName",
34
+ "RegionType",
35
+ "StateName",
36
+ "Home Type",
37
+ ]
38
+
39
+ slug_column_mappings = {
40
+ "_mean_listings_price_cut_amt_": "Mean Listings Price Cut Amount",
41
+ "_med_doz_pending_": "Median Days on Pending",
42
+ "_median_days_to_pending_": "Median Days to Close",
43
+ "_perc_listings_price_cut_": "Percent Listings Price Cut",
44
+ }
45
+
46
+ data_dir_path = get_data_path_for_config(CONFIG_NAME)
47
+
48
+ for filename in os.listdir(data_dir_path):
49
+ if filename.endswith(".csv"):
50
+ print("processing " + filename)
51
+ # skip month files for now since they are redundant
52
+ if "month" in filename:
53
+ continue
54
+
55
+ cur_df = pd.read_csv(os.path.join(data_dir_path, filename))
56
+
57
+ cur_df["RegionName"] = cur_df["RegionName"].astype(str)
58
+ cur_df = set_home_type(cur_df, filename)
59
+
60
+ data_frames = handle_slug_column_mappings(
61
+ data_frames, slug_column_mappings, exclude_columns, filename, cur_df
62
+ )
63
+
64
+
65
+ combined_df = get_combined_df(
66
+ data_frames,
67
+ [
68
+ "RegionID",
69
+ "SizeRank",
70
+ "RegionName",
71
+ "RegionType",
72
+ "StateName",
73
+ "Home Type",
74
+ "Date",
75
+ ],
76
+ )
77
+
78
+ combined_df
79
+
80
+
81
+ # In[7]:
82
+
83
+
84
+ # Adjust column names
85
+ final_df = combined_df.rename(
86
+ columns={
87
+ "RegionID": "Region ID",
88
+ "SizeRank": "Size Rank",
89
+ "RegionName": "Region",
90
+ "RegionType": "Region Type",
91
+ "StateName": "State",
92
+ }
93
+ )
94
+
95
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
96
+
97
+ final_df
98
+
99
+
100
+ # In[8]:
101
+
102
+
103
+ save_final_df_as_jsonl(CONFIG_NAME, final_df)
104
+
processors/for_sale_listings.ipynb ADDED
@@ -0,0 +1,727 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import pandas as pd\n",
10
+ "import os\n",
11
+ "\n",
12
+ "from helpers import (\n",
13
+ " get_data_path_for_config,\n",
14
+ " get_combined_df,\n",
15
+ " save_final_df_as_jsonl,\n",
16
+ " handle_slug_column_mappings,\n",
17
+ " set_home_type,\n",
18
+ ")"
19
+ ]
20
+ },
21
+ {
22
+ "cell_type": "code",
23
+ "execution_count": 2,
24
+ "metadata": {},
25
+ "outputs": [],
26
+ "source": [
27
+ "CONFIG_NAME = \"for_sale_listings\""
28
+ ]
29
+ },
30
+ {
31
+ "cell_type": "code",
32
+ "execution_count": 3,
33
+ "metadata": {},
34
+ "outputs": [
35
+ {
36
+ "name": "stdout",
37
+ "output_type": "stream",
38
+ "text": [
39
+ "processing Metro_new_pending_uc_sfrcondo_sm_month.csv\n",
40
+ "processing Metro_invt_fs_uc_sfrcondo_week.csv\n",
41
+ "processing Metro_mlp_uc_sfrcondo_week.csv\n",
42
+ "processing Metro_invt_fs_uc_sfr_month.csv\n",
43
+ "processing Metro_mlp_uc_sfr_sm_month.csv\n",
44
+ "processing Metro_new_pending_uc_sfrcondo_month.csv\n",
45
+ "processing Metro_mlp_uc_sfrcondo_sm_week.csv\n",
46
+ "processing Metro_invt_fs_uc_sfrcondo_month.csv\n",
47
+ "processing Metro_mlp_uc_sfr_sm_week.csv\n",
48
+ "processing Metro_mlp_uc_sfrcondo_month.csv\n",
49
+ "processing Metro_new_pending_uc_sfrcondo_sm_week.csv\n",
50
+ "processing Metro_invt_fs_uc_sfr_sm_week.csv\n",
51
+ "processing Metro_invt_fs_uc_sfr_sm_month.csv\n",
52
+ "processing Metro_mlp_uc_sfr_month.csv\n",
53
+ "processing Metro_new_listings_uc_sfrcondo_week.csv\n",
54
+ "processing Metro_mlp_uc_sfrcondo_sm_month.csv\n",
55
+ "processing Metro_invt_fs_uc_sfrcondo_sm_week.csv\n",
56
+ "processing Metro_new_listings_uc_sfrcondo_sm_week.csv\n",
57
+ "processing Metro_new_listings_uc_sfrcondo_month.csv\n",
58
+ "processing Metro_new_pending_uc_sfrcondo_week.csv\n",
59
+ "processing Metro_invt_fs_uc_sfr_week.csv\n",
60
+ "processing Metro_new_listings_uc_sfrcondo_sm_month.csv\n",
61
+ "processing Metro_mlp_uc_sfr_week.csv\n",
62
+ "processing Metro_invt_fs_uc_sfrcondo_sm_month.csv\n"
63
+ ]
64
+ },
65
+ {
66
+ "data": {
67
+ "text/html": [
68
+ "<div>\n",
69
+ "<style scoped>\n",
70
+ " .dataframe tbody tr th:only-of-type {\n",
71
+ " vertical-align: middle;\n",
72
+ " }\n",
73
+ "\n",
74
+ " .dataframe tbody tr th {\n",
75
+ " vertical-align: top;\n",
76
+ " }\n",
77
+ "\n",
78
+ " .dataframe thead th {\n",
79
+ " text-align: right;\n",
80
+ " }\n",
81
+ "</style>\n",
82
+ "<table border=\"1\" class=\"dataframe\">\n",
83
+ " <thead>\n",
84
+ " <tr style=\"text-align: right;\">\n",
85
+ " <th></th>\n",
86
+ " <th>RegionID</th>\n",
87
+ " <th>SizeRank</th>\n",
88
+ " <th>RegionName</th>\n",
89
+ " <th>RegionType</th>\n",
90
+ " <th>StateName</th>\n",
91
+ " <th>Home Type</th>\n",
92
+ " <th>Date</th>\n",
93
+ " <th>Median Listing Price</th>\n",
94
+ " <th>Median Listing Price (Smoothed)</th>\n",
95
+ " <th>New Pending (Smoothed)</th>\n",
96
+ " <th>New Listings</th>\n",
97
+ " <th>New Listings (Smoothed)</th>\n",
98
+ " <th>New Pending</th>\n",
99
+ " </tr>\n",
100
+ " </thead>\n",
101
+ " <tbody>\n",
102
+ " <tr>\n",
103
+ " <th>0</th>\n",
104
+ " <td>102001</td>\n",
105
+ " <td>0</td>\n",
106
+ " <td>United States</td>\n",
107
+ " <td>country</td>\n",
108
+ " <td>NaN</td>\n",
109
+ " <td>SFR</td>\n",
110
+ " <td>2018-01-13</td>\n",
111
+ " <td>259000.0</td>\n",
112
+ " <td>NaN</td>\n",
113
+ " <td>NaN</td>\n",
114
+ " <td>NaN</td>\n",
115
+ " <td>NaN</td>\n",
116
+ " <td>NaN</td>\n",
117
+ " </tr>\n",
118
+ " <tr>\n",
119
+ " <th>1</th>\n",
120
+ " <td>102001</td>\n",
121
+ " <td>0</td>\n",
122
+ " <td>United States</td>\n",
123
+ " <td>country</td>\n",
124
+ " <td>NaN</td>\n",
125
+ " <td>SFR</td>\n",
126
+ " <td>2018-01-20</td>\n",
127
+ " <td>259900.0</td>\n",
128
+ " <td>NaN</td>\n",
129
+ " <td>NaN</td>\n",
130
+ " <td>NaN</td>\n",
131
+ " <td>NaN</td>\n",
132
+ " <td>NaN</td>\n",
133
+ " </tr>\n",
134
+ " <tr>\n",
135
+ " <th>2</th>\n",
136
+ " <td>102001</td>\n",
137
+ " <td>0</td>\n",
138
+ " <td>United States</td>\n",
139
+ " <td>country</td>\n",
140
+ " <td>NaN</td>\n",
141
+ " <td>SFR</td>\n",
142
+ " <td>2018-01-27</td>\n",
143
+ " <td>259900.0</td>\n",
144
+ " <td>NaN</td>\n",
145
+ " <td>NaN</td>\n",
146
+ " <td>NaN</td>\n",
147
+ " <td>NaN</td>\n",
148
+ " <td>NaN</td>\n",
149
+ " </tr>\n",
150
+ " <tr>\n",
151
+ " <th>3</th>\n",
152
+ " <td>102001</td>\n",
153
+ " <td>0</td>\n",
154
+ " <td>United States</td>\n",
155
+ " <td>country</td>\n",
156
+ " <td>NaN</td>\n",
157
+ " <td>SFR</td>\n",
158
+ " <td>2018-02-03</td>\n",
159
+ " <td>260000.0</td>\n",
160
+ " <td>259700.0</td>\n",
161
+ " <td>NaN</td>\n",
162
+ " <td>NaN</td>\n",
163
+ " <td>NaN</td>\n",
164
+ " <td>NaN</td>\n",
165
+ " </tr>\n",
166
+ " <tr>\n",
167
+ " <th>4</th>\n",
168
+ " <td>102001</td>\n",
169
+ " <td>0</td>\n",
170
+ " <td>United States</td>\n",
171
+ " <td>country</td>\n",
172
+ " <td>NaN</td>\n",
173
+ " <td>SFR</td>\n",
174
+ " <td>2018-02-10</td>\n",
175
+ " <td>264900.0</td>\n",
176
+ " <td>261175.0</td>\n",
177
+ " <td>NaN</td>\n",
178
+ " <td>NaN</td>\n",
179
+ " <td>NaN</td>\n",
180
+ " <td>NaN</td>\n",
181
+ " </tr>\n",
182
+ " <tr>\n",
183
+ " <th>...</th>\n",
184
+ " <td>...</td>\n",
185
+ " <td>...</td>\n",
186
+ " <td>...</td>\n",
187
+ " <td>...</td>\n",
188
+ " <td>...</td>\n",
189
+ " <td>...</td>\n",
190
+ " <td>...</td>\n",
191
+ " <td>...</td>\n",
192
+ " <td>...</td>\n",
193
+ " <td>...</td>\n",
194
+ " <td>...</td>\n",
195
+ " <td>...</td>\n",
196
+ " <td>...</td>\n",
197
+ " </tr>\n",
198
+ " <tr>\n",
199
+ " <th>578648</th>\n",
200
+ " <td>845172</td>\n",
201
+ " <td>769</td>\n",
202
+ " <td>Winfield, KS</td>\n",
203
+ " <td>msa</td>\n",
204
+ " <td>KS</td>\n",
205
+ " <td>all homes</td>\n",
206
+ " <td>2023-12-09</td>\n",
207
+ " <td>134950.0</td>\n",
208
+ " <td>138913.0</td>\n",
209
+ " <td>NaN</td>\n",
210
+ " <td>NaN</td>\n",
211
+ " <td>NaN</td>\n",
212
+ " <td>NaN</td>\n",
213
+ " </tr>\n",
214
+ " <tr>\n",
215
+ " <th>578649</th>\n",
216
+ " <td>845172</td>\n",
217
+ " <td>769</td>\n",
218
+ " <td>Winfield, KS</td>\n",
219
+ " <td>msa</td>\n",
220
+ " <td>KS</td>\n",
221
+ " <td>all homes</td>\n",
222
+ " <td>2023-12-16</td>\n",
223
+ " <td>120000.0</td>\n",
224
+ " <td>133938.0</td>\n",
225
+ " <td>NaN</td>\n",
226
+ " <td>NaN</td>\n",
227
+ " <td>NaN</td>\n",
228
+ " <td>NaN</td>\n",
229
+ " </tr>\n",
230
+ " <tr>\n",
231
+ " <th>578650</th>\n",
232
+ " <td>845172</td>\n",
233
+ " <td>769</td>\n",
234
+ " <td>Winfield, KS</td>\n",
235
+ " <td>msa</td>\n",
236
+ " <td>KS</td>\n",
237
+ " <td>all homes</td>\n",
238
+ " <td>2023-12-23</td>\n",
239
+ " <td>111000.0</td>\n",
240
+ " <td>126463.0</td>\n",
241
+ " <td>NaN</td>\n",
242
+ " <td>NaN</td>\n",
243
+ " <td>NaN</td>\n",
244
+ " <td>NaN</td>\n",
245
+ " </tr>\n",
246
+ " <tr>\n",
247
+ " <th>578651</th>\n",
248
+ " <td>845172</td>\n",
249
+ " <td>769</td>\n",
250
+ " <td>Winfield, KS</td>\n",
251
+ " <td>msa</td>\n",
252
+ " <td>KS</td>\n",
253
+ " <td>all homes</td>\n",
254
+ " <td>2023-12-30</td>\n",
255
+ " <td>126950.0</td>\n",
256
+ " <td>123225.0</td>\n",
257
+ " <td>NaN</td>\n",
258
+ " <td>NaN</td>\n",
259
+ " <td>NaN</td>\n",
260
+ " <td>NaN</td>\n",
261
+ " </tr>\n",
262
+ " <tr>\n",
263
+ " <th>578652</th>\n",
264
+ " <td>845172</td>\n",
265
+ " <td>769</td>\n",
266
+ " <td>Winfield, KS</td>\n",
267
+ " <td>msa</td>\n",
268
+ " <td>KS</td>\n",
269
+ " <td>all homes</td>\n",
270
+ " <td>2024-01-06</td>\n",
271
+ " <td>128000.0</td>\n",
272
+ " <td>121488.0</td>\n",
273
+ " <td>NaN</td>\n",
274
+ " <td>NaN</td>\n",
275
+ " <td>NaN</td>\n",
276
+ " <td>NaN</td>\n",
277
+ " </tr>\n",
278
+ " </tbody>\n",
279
+ "</table>\n",
280
+ "<p>578653 rows Γ— 13 columns</p>\n",
281
+ "</div>"
282
+ ],
283
+ "text/plain": [
284
+ " RegionID SizeRank RegionName RegionType StateName Home Type \\\n",
285
+ "0 102001 0 United States country NaN SFR \n",
286
+ "1 102001 0 United States country NaN SFR \n",
287
+ "2 102001 0 United States country NaN SFR \n",
288
+ "3 102001 0 United States country NaN SFR \n",
289
+ "4 102001 0 United States country NaN SFR \n",
290
+ "... ... ... ... ... ... ... \n",
291
+ "578648 845172 769 Winfield, KS msa KS all homes \n",
292
+ "578649 845172 769 Winfield, KS msa KS all homes \n",
293
+ "578650 845172 769 Winfield, KS msa KS all homes \n",
294
+ "578651 845172 769 Winfield, KS msa KS all homes \n",
295
+ "578652 845172 769 Winfield, KS msa KS all homes \n",
296
+ "\n",
297
+ " Date Median Listing Price Median Listing Price (Smoothed) \\\n",
298
+ "0 2018-01-13 259000.0 NaN \n",
299
+ "1 2018-01-20 259900.0 NaN \n",
300
+ "2 2018-01-27 259900.0 NaN \n",
301
+ "3 2018-02-03 260000.0 259700.0 \n",
302
+ "4 2018-02-10 264900.0 261175.0 \n",
303
+ "... ... ... ... \n",
304
+ "578648 2023-12-09 134950.0 138913.0 \n",
305
+ "578649 2023-12-16 120000.0 133938.0 \n",
306
+ "578650 2023-12-23 111000.0 126463.0 \n",
307
+ "578651 2023-12-30 126950.0 123225.0 \n",
308
+ "578652 2024-01-06 128000.0 121488.0 \n",
309
+ "\n",
310
+ " New Pending (Smoothed) New Listings New Listings (Smoothed) \\\n",
311
+ "0 NaN NaN NaN \n",
312
+ "1 NaN NaN NaN \n",
313
+ "2 NaN NaN NaN \n",
314
+ "3 NaN NaN NaN \n",
315
+ "4 NaN NaN NaN \n",
316
+ "... ... ... ... \n",
317
+ "578648 NaN NaN NaN \n",
318
+ "578649 NaN NaN NaN \n",
319
+ "578650 NaN NaN NaN \n",
320
+ "578651 NaN NaN NaN \n",
321
+ "578652 NaN NaN NaN \n",
322
+ "\n",
323
+ " New Pending \n",
324
+ "0 NaN \n",
325
+ "1 NaN \n",
326
+ "2 NaN \n",
327
+ "3 NaN \n",
328
+ "4 NaN \n",
329
+ "... ... \n",
330
+ "578648 NaN \n",
331
+ "578649 NaN \n",
332
+ "578650 NaN \n",
333
+ "578651 NaN \n",
334
+ "578652 NaN \n",
335
+ "\n",
336
+ "[578653 rows x 13 columns]"
337
+ ]
338
+ },
339
+ "execution_count": 3,
340
+ "metadata": {},
341
+ "output_type": "execute_result"
342
+ }
343
+ ],
344
+ "source": [
345
+ "data_frames = []\n",
346
+ "\n",
347
+ "exclude_columns = [\n",
348
+ " \"RegionID\",\n",
349
+ " \"SizeRank\",\n",
350
+ " \"RegionName\",\n",
351
+ " \"RegionType\",\n",
352
+ " \"StateName\",\n",
353
+ " \"Home Type\",\n",
354
+ "]\n",
355
+ "\n",
356
+ "slug_column_mappings = {\n",
357
+ " \"_mlp_\": \"Median Listing Price\",\n",
358
+ " \"_new_listings_\": \"New Listings\",\n",
359
+ " \"new_pending\": \"New Pending\",\n",
360
+ "}\n",
361
+ "\n",
362
+ "data_dir_path = get_data_path_for_config(CONFIG_NAME)\n",
363
+ "\n",
364
+ "for filename in os.listdir(data_dir_path):\n",
365
+ " if filename.endswith(\".csv\"):\n",
366
+ " print(\"processing \" + filename)\n",
367
+ " cur_df = pd.read_csv(os.path.join(data_dir_path, filename))\n",
368
+ "\n",
369
+ " # ignore monthly data for now since it is redundant\n",
370
+ " if \"month\" in filename:\n",
371
+ " continue\n",
372
+ "\n",
373
+ " cur_df = set_home_type(cur_df, filename)\n",
374
+ "\n",
375
+ " data_frames = handle_slug_column_mappings(\n",
376
+ " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
377
+ " )\n",
378
+ "\n",
379
+ "combined_df = get_combined_df(\n",
380
+ " data_frames,\n",
381
+ " [\n",
382
+ " \"RegionID\",\n",
383
+ " \"SizeRank\",\n",
384
+ " \"RegionName\",\n",
385
+ " \"RegionType\",\n",
386
+ " \"StateName\",\n",
387
+ " \"Home Type\",\n",
388
+ " \"Date\",\n",
389
+ " ],\n",
390
+ ")\n",
391
+ "\n",
392
+ "combined_df"
393
+ ]
394
+ },
395
+ {
396
+ "cell_type": "code",
397
+ "execution_count": 4,
398
+ "metadata": {},
399
+ "outputs": [
400
+ {
401
+ "data": {
402
+ "text/html": [
403
+ "<div>\n",
404
+ "<style scoped>\n",
405
+ " .dataframe tbody tr th:only-of-type {\n",
406
+ " vertical-align: middle;\n",
407
+ " }\n",
408
+ "\n",
409
+ " .dataframe tbody tr th {\n",
410
+ " vertical-align: top;\n",
411
+ " }\n",
412
+ "\n",
413
+ " .dataframe thead th {\n",
414
+ " text-align: right;\n",
415
+ " }\n",
416
+ "</style>\n",
417
+ "<table border=\"1\" class=\"dataframe\">\n",
418
+ " <thead>\n",
419
+ " <tr style=\"text-align: right;\">\n",
420
+ " <th></th>\n",
421
+ " <th>Region ID</th>\n",
422
+ " <th>Size Rank</th>\n",
423
+ " <th>Region</th>\n",
424
+ " <th>Region Type</th>\n",
425
+ " <th>State</th>\n",
426
+ " <th>Home Type</th>\n",
427
+ " <th>Date</th>\n",
428
+ " <th>Median Listing Price</th>\n",
429
+ " <th>Median Listing Price (Smoothed)</th>\n",
430
+ " <th>New Pending (Smoothed)</th>\n",
431
+ " <th>New Listings</th>\n",
432
+ " <th>New Listings (Smoothed)</th>\n",
433
+ " <th>New Pending</th>\n",
434
+ " </tr>\n",
435
+ " </thead>\n",
436
+ " <tbody>\n",
437
+ " <tr>\n",
438
+ " <th>0</th>\n",
439
+ " <td>102001</td>\n",
440
+ " <td>0</td>\n",
441
+ " <td>United States</td>\n",
442
+ " <td>country</td>\n",
443
+ " <td>NaN</td>\n",
444
+ " <td>SFR</td>\n",
445
+ " <td>2018-01-13</td>\n",
446
+ " <td>259000.0</td>\n",
447
+ " <td>NaN</td>\n",
448
+ " <td>NaN</td>\n",
449
+ " <td>NaN</td>\n",
450
+ " <td>NaN</td>\n",
451
+ " <td>NaN</td>\n",
452
+ " </tr>\n",
453
+ " <tr>\n",
454
+ " <th>1</th>\n",
455
+ " <td>102001</td>\n",
456
+ " <td>0</td>\n",
457
+ " <td>United States</td>\n",
458
+ " <td>country</td>\n",
459
+ " <td>NaN</td>\n",
460
+ " <td>SFR</td>\n",
461
+ " <td>2018-01-20</td>\n",
462
+ " <td>259900.0</td>\n",
463
+ " <td>NaN</td>\n",
464
+ " <td>NaN</td>\n",
465
+ " <td>NaN</td>\n",
466
+ " <td>NaN</td>\n",
467
+ " <td>NaN</td>\n",
468
+ " </tr>\n",
469
+ " <tr>\n",
470
+ " <th>2</th>\n",
471
+ " <td>102001</td>\n",
472
+ " <td>0</td>\n",
473
+ " <td>United States</td>\n",
474
+ " <td>country</td>\n",
475
+ " <td>NaN</td>\n",
476
+ " <td>SFR</td>\n",
477
+ " <td>2018-01-27</td>\n",
478
+ " <td>259900.0</td>\n",
479
+ " <td>NaN</td>\n",
480
+ " <td>NaN</td>\n",
481
+ " <td>NaN</td>\n",
482
+ " <td>NaN</td>\n",
483
+ " <td>NaN</td>\n",
484
+ " </tr>\n",
485
+ " <tr>\n",
486
+ " <th>3</th>\n",
487
+ " <td>102001</td>\n",
488
+ " <td>0</td>\n",
489
+ " <td>United States</td>\n",
490
+ " <td>country</td>\n",
491
+ " <td>NaN</td>\n",
492
+ " <td>SFR</td>\n",
493
+ " <td>2018-02-03</td>\n",
494
+ " <td>260000.0</td>\n",
495
+ " <td>259700.0</td>\n",
496
+ " <td>NaN</td>\n",
497
+ " <td>NaN</td>\n",
498
+ " <td>NaN</td>\n",
499
+ " <td>NaN</td>\n",
500
+ " </tr>\n",
501
+ " <tr>\n",
502
+ " <th>4</th>\n",
503
+ " <td>102001</td>\n",
504
+ " <td>0</td>\n",
505
+ " <td>United States</td>\n",
506
+ " <td>country</td>\n",
507
+ " <td>NaN</td>\n",
508
+ " <td>SFR</td>\n",
509
+ " <td>2018-02-10</td>\n",
510
+ " <td>264900.0</td>\n",
511
+ " <td>261175.0</td>\n",
512
+ " <td>NaN</td>\n",
513
+ " <td>NaN</td>\n",
514
+ " <td>NaN</td>\n",
515
+ " <td>NaN</td>\n",
516
+ " </tr>\n",
517
+ " <tr>\n",
518
+ " <th>...</th>\n",
519
+ " <td>...</td>\n",
520
+ " <td>...</td>\n",
521
+ " <td>...</td>\n",
522
+ " <td>...</td>\n",
523
+ " <td>...</td>\n",
524
+ " <td>...</td>\n",
525
+ " <td>...</td>\n",
526
+ " <td>...</td>\n",
527
+ " <td>...</td>\n",
528
+ " <td>...</td>\n",
529
+ " <td>...</td>\n",
530
+ " <td>...</td>\n",
531
+ " <td>...</td>\n",
532
+ " </tr>\n",
533
+ " <tr>\n",
534
+ " <th>578648</th>\n",
535
+ " <td>845172</td>\n",
536
+ " <td>769</td>\n",
537
+ " <td>Winfield, KS</td>\n",
538
+ " <td>msa</td>\n",
539
+ " <td>KS</td>\n",
540
+ " <td>all homes</td>\n",
541
+ " <td>2023-12-09</td>\n",
542
+ " <td>134950.0</td>\n",
543
+ " <td>138913.0</td>\n",
544
+ " <td>NaN</td>\n",
545
+ " <td>NaN</td>\n",
546
+ " <td>NaN</td>\n",
547
+ " <td>NaN</td>\n",
548
+ " </tr>\n",
549
+ " <tr>\n",
550
+ " <th>578649</th>\n",
551
+ " <td>845172</td>\n",
552
+ " <td>769</td>\n",
553
+ " <td>Winfield, KS</td>\n",
554
+ " <td>msa</td>\n",
555
+ " <td>KS</td>\n",
556
+ " <td>all homes</td>\n",
557
+ " <td>2023-12-16</td>\n",
558
+ " <td>120000.0</td>\n",
559
+ " <td>133938.0</td>\n",
560
+ " <td>NaN</td>\n",
561
+ " <td>NaN</td>\n",
562
+ " <td>NaN</td>\n",
563
+ " <td>NaN</td>\n",
564
+ " </tr>\n",
565
+ " <tr>\n",
566
+ " <th>578650</th>\n",
567
+ " <td>845172</td>\n",
568
+ " <td>769</td>\n",
569
+ " <td>Winfield, KS</td>\n",
570
+ " <td>msa</td>\n",
571
+ " <td>KS</td>\n",
572
+ " <td>all homes</td>\n",
573
+ " <td>2023-12-23</td>\n",
574
+ " <td>111000.0</td>\n",
575
+ " <td>126463.0</td>\n",
576
+ " <td>NaN</td>\n",
577
+ " <td>NaN</td>\n",
578
+ " <td>NaN</td>\n",
579
+ " <td>NaN</td>\n",
580
+ " </tr>\n",
581
+ " <tr>\n",
582
+ " <th>578651</th>\n",
583
+ " <td>845172</td>\n",
584
+ " <td>769</td>\n",
585
+ " <td>Winfield, KS</td>\n",
586
+ " <td>msa</td>\n",
587
+ " <td>KS</td>\n",
588
+ " <td>all homes</td>\n",
589
+ " <td>2023-12-30</td>\n",
590
+ " <td>126950.0</td>\n",
591
+ " <td>123225.0</td>\n",
592
+ " <td>NaN</td>\n",
593
+ " <td>NaN</td>\n",
594
+ " <td>NaN</td>\n",
595
+ " <td>NaN</td>\n",
596
+ " </tr>\n",
597
+ " <tr>\n",
598
+ " <th>578652</th>\n",
599
+ " <td>845172</td>\n",
600
+ " <td>769</td>\n",
601
+ " <td>Winfield, KS</td>\n",
602
+ " <td>msa</td>\n",
603
+ " <td>KS</td>\n",
604
+ " <td>all homes</td>\n",
605
+ " <td>2024-01-06</td>\n",
606
+ " <td>128000.0</td>\n",
607
+ " <td>121488.0</td>\n",
608
+ " <td>NaN</td>\n",
609
+ " <td>NaN</td>\n",
610
+ " <td>NaN</td>\n",
611
+ " <td>NaN</td>\n",
612
+ " </tr>\n",
613
+ " </tbody>\n",
614
+ "</table>\n",
615
+ "<p>578653 rows Γ— 13 columns</p>\n",
616
+ "</div>"
617
+ ],
618
+ "text/plain": [
619
+ " Region ID Size Rank Region Region Type State Home Type \\\n",
620
+ "0 102001 0 United States country NaN SFR \n",
621
+ "1 102001 0 United States country NaN SFR \n",
622
+ "2 102001 0 United States country NaN SFR \n",
623
+ "3 102001 0 United States country NaN SFR \n",
624
+ "4 102001 0 United States country NaN SFR \n",
625
+ "... ... ... ... ... ... ... \n",
626
+ "578648 845172 769 Winfield, KS msa KS all homes \n",
627
+ "578649 845172 769 Winfield, KS msa KS all homes \n",
628
+ "578650 845172 769 Winfield, KS msa KS all homes \n",
629
+ "578651 845172 769 Winfield, KS msa KS all homes \n",
630
+ "578652 845172 769 Winfield, KS msa KS all homes \n",
631
+ "\n",
632
+ " Date Median Listing Price Median Listing Price (Smoothed) \\\n",
633
+ "0 2018-01-13 259000.0 NaN \n",
634
+ "1 2018-01-20 259900.0 NaN \n",
635
+ "2 2018-01-27 259900.0 NaN \n",
636
+ "3 2018-02-03 260000.0 259700.0 \n",
637
+ "4 2018-02-10 264900.0 261175.0 \n",
638
+ "... ... ... ... \n",
639
+ "578648 2023-12-09 134950.0 138913.0 \n",
640
+ "578649 2023-12-16 120000.0 133938.0 \n",
641
+ "578650 2023-12-23 111000.0 126463.0 \n",
642
+ "578651 2023-12-30 126950.0 123225.0 \n",
643
+ "578652 2024-01-06 128000.0 121488.0 \n",
644
+ "\n",
645
+ " New Pending (Smoothed) New Listings New Listings (Smoothed) \\\n",
646
+ "0 NaN NaN NaN \n",
647
+ "1 NaN NaN NaN \n",
648
+ "2 NaN NaN NaN \n",
649
+ "3 NaN NaN NaN \n",
650
+ "4 NaN NaN NaN \n",
651
+ "... ... ... ... \n",
652
+ "578648 NaN NaN NaN \n",
653
+ "578649 NaN NaN NaN \n",
654
+ "578650 NaN NaN NaN \n",
655
+ "578651 NaN NaN NaN \n",
656
+ "578652 NaN NaN NaN \n",
657
+ "\n",
658
+ " New Pending \n",
659
+ "0 NaN \n",
660
+ "1 NaN \n",
661
+ "2 NaN \n",
662
+ "3 NaN \n",
663
+ "4 NaN \n",
664
+ "... ... \n",
665
+ "578648 NaN \n",
666
+ "578649 NaN \n",
667
+ "578650 NaN \n",
668
+ "578651 NaN \n",
669
+ "578652 NaN \n",
670
+ "\n",
671
+ "[578653 rows x 13 columns]"
672
+ ]
673
+ },
674
+ "execution_count": 4,
675
+ "metadata": {},
676
+ "output_type": "execute_result"
677
+ }
678
+ ],
679
+ "source": [
680
+ "# Adjust column names\n",
681
+ "final_df = combined_df.rename(\n",
682
+ " columns={\n",
683
+ " \"RegionID\": \"Region ID\",\n",
684
+ " \"SizeRank\": \"Size Rank\",\n",
685
+ " \"RegionName\": \"Region\",\n",
686
+ " \"RegionType\": \"Region Type\",\n",
687
+ " \"StateName\": \"State\",\n",
688
+ " }\n",
689
+ ")\n",
690
+ "\n",
691
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
692
+ "\n",
693
+ "final_df"
694
+ ]
695
+ },
696
+ {
697
+ "cell_type": "code",
698
+ "execution_count": 5,
699
+ "metadata": {},
700
+ "outputs": [],
701
+ "source": [
702
+ "save_final_df_as_jsonl(CONFIG_NAME, final_df)"
703
+ ]
704
+ }
705
+ ],
706
+ "metadata": {
707
+ "kernelspec": {
708
+ "display_name": "Python 3",
709
+ "language": "python",
710
+ "name": "python3"
711
+ },
712
+ "language_info": {
713
+ "codemirror_mode": {
714
+ "name": "ipython",
715
+ "version": 3
716
+ },
717
+ "file_extension": ".py",
718
+ "mimetype": "text/x-python",
719
+ "name": "python",
720
+ "nbconvert_exporter": "python",
721
+ "pygments_lexer": "ipython3",
722
+ "version": "3.12.2"
723
+ }
724
+ },
725
+ "nbformat": 4,
726
+ "nbformat_minor": 2
727
+ }
processors/for_sale_listings.py ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[1]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import (
11
+ get_data_path_for_config,
12
+ get_combined_df,
13
+ save_final_df_as_jsonl,
14
+ handle_slug_column_mappings,
15
+ set_home_type,
16
+ )
17
+
18
+
19
+ # In[2]:
20
+
21
+
22
+ CONFIG_NAME = "for_sale_listings"
23
+
24
+
25
+ # In[3]:
26
+
27
+
28
+ data_frames = []
29
+
30
+ exclude_columns = [
31
+ "RegionID",
32
+ "SizeRank",
33
+ "RegionName",
34
+ "RegionType",
35
+ "StateName",
36
+ "Home Type",
37
+ ]
38
+
39
+ slug_column_mappings = {
40
+ "_mlp_": "Median Listing Price",
41
+ "_new_listings_": "New Listings",
42
+ "new_pending": "New Pending",
43
+ }
44
+
45
+ data_dir_path = get_data_path_for_config(CONFIG_NAME)
46
+
47
+ for filename in os.listdir(data_dir_path):
48
+ if filename.endswith(".csv"):
49
+ print("processing " + filename)
50
+ cur_df = pd.read_csv(os.path.join(data_dir_path, filename))
51
+
52
+ # ignore monthly data for now since it is redundant
53
+ if "month" in filename:
54
+ continue
55
+
56
+ cur_df = set_home_type(cur_df, filename)
57
+
58
+ data_frames = handle_slug_column_mappings(
59
+ data_frames, slug_column_mappings, exclude_columns, filename, cur_df
60
+ )
61
+
62
+ combined_df = get_combined_df(
63
+ data_frames,
64
+ [
65
+ "RegionID",
66
+ "SizeRank",
67
+ "RegionName",
68
+ "RegionType",
69
+ "StateName",
70
+ "Home Type",
71
+ "Date",
72
+ ],
73
+ )
74
+
75
+ combined_df
76
+
77
+
78
+ # In[4]:
79
+
80
+
81
+ # Adjust column names
82
+ final_df = combined_df.rename(
83
+ columns={
84
+ "RegionID": "Region ID",
85
+ "SizeRank": "Size Rank",
86
+ "RegionName": "Region",
87
+ "RegionType": "Region Type",
88
+ "StateName": "State",
89
+ }
90
+ )
91
+
92
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
93
+
94
+ final_df
95
+
96
+
97
+ # In[5]:
98
+
99
+
100
+ save_final_df_as_jsonl(CONFIG_NAME, final_df)
101
+
processors/helpers.py ADDED
@@ -0,0 +1,120 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pandas as pd
2
+ import os
3
+
4
+
5
def get_data_path_for_config(config_name):
    """Return the raw-data directory for ``config_name`` (../data/<config_name>)."""
    return os.path.join("../data", config_name)
8
+
9
+
10
def coalesce_columns(
    df,
):
    """Fold merge-suffixed duplicate columns back into their base columns.

    Base columns are those whose name contains no underscore (``pd.merge``
    adds ``_<i>`` suffixes to collided columns). For every base column, each
    suffixed variant that contains the base name overrides the base value
    wherever the variant is non-NaN; variants are applied in column order,
    so the LAST non-NaN variant wins — identical to the original row-wise
    loop, but vectorized instead of O(rows x cols^2) ``iterrows`` (whose
    trailing ``continue`` was also a no-op).

    Returns a frame containing only the base columns.
    """
    columns_to_coalesce = [col for col in df.columns if "_" not in col]
    for base in columns_to_coalesce:
        for col in df.columns:
            # NOTE: substring match, as in the original — "A" also matches
            # any suffixed column whose name merely contains "A".
            if base in col and "_" in col:
                # combine_first: take the variant where non-NaN, else keep base.
                df[base] = df[col].combine_first(df[base])

    # Drop the suffixed columns, keeping only the coalesced bases.
    combined_df = df[columns_to_coalesce]
    return combined_df
25
+
26
+
27
def set_home_type(cur_df, filename):
    """Add a "Home Type" column inferred from slugs in ``filename``.

    "_sfrcondo_" first marks the frame as "all homes"; afterwards the first
    matching slug in the table below overrides/sets the value (mirroring the
    original if/elif chain). Filenames matching no slug are left untouched.
    """
    if "_sfrcondo_" in filename:
        cur_df["Home Type"] = "all homes"

    # First match wins, exactly like the original if/elif chain.
    for slug, label in (
        ("_sfrcondomfr_", "all homes plus multifamily"),
        ("_sfr_", "SFR"),
        ("_condo_", "condo/co-op"),
        ("_mfr_", "multifamily"),
    ):
        if slug in filename:
            cur_df["Home Type"] = label
            break

    return cur_df
40
+
41
+
42
def get_combined_df(data_frames, on):
    """Outer-merge all frames on the ``on`` key columns, then coalesce.

    Successive frames are merged with suffixes "", "_1", "_2", ... so the
    first frame's columns keep their names; ``coalesce_columns`` then folds
    the suffixed duplicates back into their base columns.

    NOTE(review): an empty ``data_frames`` leaves the accumulator as None,
    which ``coalesce_columns`` cannot handle — callers must pass >= 1 frame.
    """
    combined_df = None
    if len(data_frames) == 1:
        combined_df = data_frames[0]
    elif len(data_frames) > 1:
        combined_df = data_frames[0]
        for i, cur_df in enumerate(data_frames[1:], start=1):
            combined_df = pd.merge(
                combined_df,
                cur_df,
                on=on,
                how="outer",
                suffixes=("", "_" + str(i)),
            )

    return coalesce_columns(combined_df)
62
+
63
+
64
def get_melted_df(
    df,
    exclude_columns,
    columns_to_pivot,
    col_name,
    filename,
):
    """Melt wide per-date columns into long (Date, value) rows.

    ``col_name`` is decorated with " (Smoothed)" / " (Seasonally Adjusted)"
    when the filename carries the "_sm_" / "_sa_" slugs, then used as the
    value-column name of the melted frame.
    """
    if "_sm_" in filename:
        col_name += " (Smoothed)"
    if "_sa_" in filename:
        col_name += " (Seasonally Adjusted)"

    return pd.melt(
        df,
        id_vars=exclude_columns,
        value_vars=columns_to_pivot,
        var_name="Date",
        value_name=col_name,
    )
88
+
89
+
90
def save_final_df_as_jsonl(config_name, df):
    """Write ``df`` as JSON Lines to ../processed/<config_name>.jsonl.

    Creates the output directory on first use; one record per row.
    """
    processed_dir = "../processed/"
    os.makedirs(processed_dir, exist_ok=True)

    df.to_json(
        os.path.join(processed_dir, config_name + ".jsonl"),
        orient="records",
        lines=True,
    )
99
+
100
+
101
def handle_slug_column_mappings(
    data_frames, slug_column_mappings, exclude_columns, filename, cur_df
):
    """Melt ``cur_df`` under the first slug mapping matching ``filename``.

    Columns outside ``exclude_columns`` are treated as per-date value columns.
    The first slug found in ``filename`` selects the value-column name; the
    melted frame is appended to ``data_frames``, which is returned (unchanged
    when no slug matches).
    """
    # Per-date value columns are everything that is not an identifier.
    columns_to_pivot = [c for c in cur_df.columns if c not in exclude_columns]

    for slug, col_name in slug_column_mappings.items():
        if slug not in filename:
            continue
        data_frames.append(
            get_melted_df(
                cur_df,
                exclude_columns,
                columns_to_pivot,
                col_name,
                filename,
            )
        )
        break  # first matching slug wins

    return data_frames
processors/home_values.ipynb ADDED
@@ -0,0 +1,1085 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import pandas as pd\n",
10
+ "import os\n",
11
+ "\n",
12
+ "from helpers import (\n",
13
+ " get_data_path_for_config,\n",
14
+ " get_combined_df,\n",
15
+ " save_final_df_as_jsonl,\n",
16
+ " handle_slug_column_mappings,\n",
17
+ " set_home_type,\n",
18
+ ")"
19
+ ]
20
+ },
21
+ {
22
+ "cell_type": "code",
23
+ "execution_count": 2,
24
+ "metadata": {},
25
+ "outputs": [],
26
+ "source": [
27
+ "CONFIG_NAME = \"home_values\""
28
+ ]
29
+ },
30
+ {
31
+ "cell_type": "code",
32
+ "execution_count": 3,
33
+ "metadata": {},
34
+ "outputs": [
35
+ {
36
+ "name": "stdout",
37
+ "output_type": "stream",
38
+ "text": [
39
+ "processing City_zhvi_uc_condo_tier_0.33_0.67_sm_sa_month.csv\n",
40
+ "processing City_zhvi_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
41
+ "processing Metro_zhvi_uc_sfrcondo_tier_0.67_1.0_sm_sa_month.csv\n",
42
+ "processing County_zhvi_uc_sfrcondo_tier_0.67_1.0_sm_sa_month.csv\n",
43
+ "processing Metro_zhvi_bdrmcnt_2_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
44
+ "processing County_zhvi_bdrmcnt_4_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
45
+ "processing County_zhvi_uc_sfr_tier_0.33_0.67_sm_sa_month.csv\n",
46
+ "processing Neighborhood_zhvi_bdrmcnt_4_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
47
+ "processing State_zhvi_uc_sfr_tier_0.33_0.67_sm_sa_month.csv\n",
48
+ "processing County_zhvi_uc_condo_tier_0.33_0.67_sm_sa_month.csv\n",
49
+ "processing City_zhvi_bdrmcnt_4_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
50
+ "processing State_zhvi_bdrmcnt_5_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
51
+ "processing Zip_zhvi_bdrmcnt_2_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
52
+ "processing City_zhvi_uc_sfrcondo_tier_0.67_1.0_sm_sa_month.csv\n",
53
+ "processing Zip_zhvi_uc_condo_tier_0.33_0.67_sm_sa_month.csv\n",
54
+ "processing Neighborhood_zhvi_uc_sfr_sm_sa_month.csv\n",
55
+ "processing Metro_zhvi_uc_sfr_tier_0.33_0.67_sm_sa_month.csv\n",
56
+ "processing State_zhvi_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
57
+ "processing Zip_zhvi_bdrmcnt_1_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
58
+ "processing County_zhvi_bdrmcnt_5_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
59
+ "processing Metro_zhvi_uc_condo_tier_0.33_0.67_sm_sa_month.csv\n",
60
+ "processing Metro_zhvi_bdrmcnt_3_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
61
+ "processing Neighborhood_zhvi_bdrmcnt_5_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
62
+ "processing Zip_zhvi_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
63
+ "processing State_zhvi_uc_sfrcondo_tier_0.0_0.33_sm_sa_month.csv\n",
64
+ "processing Metro_zhvi_bdrmcnt_1_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
65
+ "processing Zip_zhvi_uc_sfr_tier_0.33_0.67_sm_sa_month.csv\n",
66
+ "processing City_zhvi_bdrmcnt_5_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
67
+ "processing State_zhvi_bdrmcnt_4_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
68
+ "processing State_zhvi_uc_condo_tier_0.33_0.67_sm_sa_month.csv\n",
69
+ "processing Zip_zhvi_bdrmcnt_3_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
70
+ "processing Neighborhood_zhvi_bdrmcnt_1_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
71
+ "processing City_zhvi_bdrmcnt_3_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
72
+ "processing County_zhvi_bdrmcnt_1_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
73
+ "processing Neighborhood_zhvi_uc_condo_tier_0.33_0.67_sm_sa_month.csv\n",
74
+ "processing Metro_zhvi_uc_sfrcondo_tier_0.33_0.67_month.csv\n",
75
+ "processing Zip_zhvi_bdrmcnt_5_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
76
+ "processing County_zhvi_uc_sfrcondo_tier_0.0_0.33_sm_sa_month.csv\n",
77
+ "processing State_zhvi_bdrmcnt_2_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
78
+ "processing Metro_zhvi_uc_sfrcondo_tier_0.0_0.33_sm_sa_month.csv\n",
79
+ "processing City_zhvi_uc_sfr_tier_0.33_0.67_sm_sa_month.csv\n",
80
+ "processing City_zhvi_bdrmcnt_1_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
81
+ "processing Neighborhood_zhvi_bdrmcnt_3_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
82
+ "processing Metro_zhvi_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
83
+ "processing Metro_zhvi_bdrmcnt_5_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
84
+ "processing County_zhvi_bdrmcnt_3_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
85
+ "processing City_zhvi_bdrmcnt_2_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
86
+ "processing Neighborhood_zhvi_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
87
+ "processing State_zhvi_uc_sfrcondo_tier_0.67_1.0_sm_sa_month.csv\n",
88
+ "processing Zip_zhvi_bdrmcnt_4_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
89
+ "processing State_zhvi_bdrmcnt_3_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
90
+ "processing State_zhvi_bdrmcnt_1_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
91
+ "processing Neighborhood_zhvi_bdrmcnt_2_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
92
+ "processing County_zhvi_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
93
+ "processing County_zhvi_bdrmcnt_2_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
94
+ "processing Metro_zhvi_bdrmcnt_4_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n"
95
+ ]
96
+ },
97
+ {
98
+ "data": {
99
+ "text/html": [
100
+ "<div>\n",
101
+ "<style scoped>\n",
102
+ " .dataframe tbody tr th:only-of-type {\n",
103
+ " vertical-align: middle;\n",
104
+ " }\n",
105
+ "\n",
106
+ " .dataframe tbody tr th {\n",
107
+ " vertical-align: top;\n",
108
+ " }\n",
109
+ "\n",
110
+ " .dataframe thead th {\n",
111
+ " text-align: right;\n",
112
+ " }\n",
113
+ "</style>\n",
114
+ "<table border=\"1\" class=\"dataframe\">\n",
115
+ " <thead>\n",
116
+ " <tr style=\"text-align: right;\">\n",
117
+ " <th></th>\n",
118
+ " <th>RegionID</th>\n",
119
+ " <th>SizeRank</th>\n",
120
+ " <th>RegionName</th>\n",
121
+ " <th>RegionType</th>\n",
122
+ " <th>StateName</th>\n",
123
+ " <th>Bedroom Count</th>\n",
124
+ " <th>Home Type</th>\n",
125
+ " <th>Date</th>\n",
126
+ " <th>Mid Tier ZHVI (Smoothed) (Seasonally Adjusted)</th>\n",
127
+ " <th>Bottom Tier ZHVI (Smoothed) (Seasonally Adjusted)</th>\n",
128
+ " <th>Top Tier ZHVI (Smoothed) (Seasonally Adjusted)</th>\n",
129
+ " </tr>\n",
130
+ " </thead>\n",
131
+ " <tbody>\n",
132
+ " <tr>\n",
133
+ " <th>0</th>\n",
134
+ " <td>3</td>\n",
135
+ " <td>48</td>\n",
136
+ " <td>Alaska</td>\n",
137
+ " <td>state</td>\n",
138
+ " <td>nan</td>\n",
139
+ " <td>1-Bedroom</td>\n",
140
+ " <td>all homes</td>\n",
141
+ " <td>2000-01-31</td>\n",
142
+ " <td>81310.639504</td>\n",
143
+ " <td>NaN</td>\n",
144
+ " <td>NaN</td>\n",
145
+ " </tr>\n",
146
+ " <tr>\n",
147
+ " <th>1</th>\n",
148
+ " <td>3</td>\n",
149
+ " <td>48</td>\n",
150
+ " <td>Alaska</td>\n",
151
+ " <td>state</td>\n",
152
+ " <td>nan</td>\n",
153
+ " <td>1-Bedroom</td>\n",
154
+ " <td>all homes</td>\n",
155
+ " <td>2000-02-29</td>\n",
156
+ " <td>80419.761984</td>\n",
157
+ " <td>NaN</td>\n",
158
+ " <td>NaN</td>\n",
159
+ " </tr>\n",
160
+ " <tr>\n",
161
+ " <th>2</th>\n",
162
+ " <td>3</td>\n",
163
+ " <td>48</td>\n",
164
+ " <td>Alaska</td>\n",
165
+ " <td>state</td>\n",
166
+ " <td>nan</td>\n",
167
+ " <td>1-Bedroom</td>\n",
168
+ " <td>all homes</td>\n",
169
+ " <td>2000-03-31</td>\n",
170
+ " <td>80480.449461</td>\n",
171
+ " <td>NaN</td>\n",
172
+ " <td>NaN</td>\n",
173
+ " </tr>\n",
174
+ " <tr>\n",
175
+ " <th>3</th>\n",
176
+ " <td>3</td>\n",
177
+ " <td>48</td>\n",
178
+ " <td>Alaska</td>\n",
179
+ " <td>state</td>\n",
180
+ " <td>nan</td>\n",
181
+ " <td>1-Bedroom</td>\n",
182
+ " <td>all homes</td>\n",
183
+ " <td>2000-04-30</td>\n",
184
+ " <td>79799.206525</td>\n",
185
+ " <td>NaN</td>\n",
186
+ " <td>NaN</td>\n",
187
+ " </tr>\n",
188
+ " <tr>\n",
189
+ " <th>4</th>\n",
190
+ " <td>3</td>\n",
191
+ " <td>48</td>\n",
192
+ " <td>Alaska</td>\n",
193
+ " <td>state</td>\n",
194
+ " <td>nan</td>\n",
195
+ " <td>1-Bedroom</td>\n",
196
+ " <td>all homes</td>\n",
197
+ " <td>2000-05-31</td>\n",
198
+ " <td>79666.469861</td>\n",
199
+ " <td>NaN</td>\n",
200
+ " <td>NaN</td>\n",
201
+ " </tr>\n",
202
+ " <tr>\n",
203
+ " <th>...</th>\n",
204
+ " <td>...</td>\n",
205
+ " <td>...</td>\n",
206
+ " <td>...</td>\n",
207
+ " <td>...</td>\n",
208
+ " <td>...</td>\n",
209
+ " <td>...</td>\n",
210
+ " <td>...</td>\n",
211
+ " <td>...</td>\n",
212
+ " <td>...</td>\n",
213
+ " <td>...</td>\n",
214
+ " <td>...</td>\n",
215
+ " </tr>\n",
216
+ " <tr>\n",
217
+ " <th>117907</th>\n",
218
+ " <td>62</td>\n",
219
+ " <td>51</td>\n",
220
+ " <td>Wyoming</td>\n",
221
+ " <td>state</td>\n",
222
+ " <td>nan</td>\n",
223
+ " <td>All Bedrooms</td>\n",
224
+ " <td>condo/co-op</td>\n",
225
+ " <td>2023-09-30</td>\n",
226
+ " <td>486974.735908</td>\n",
227
+ " <td>NaN</td>\n",
228
+ " <td>NaN</td>\n",
229
+ " </tr>\n",
230
+ " <tr>\n",
231
+ " <th>117908</th>\n",
232
+ " <td>62</td>\n",
233
+ " <td>51</td>\n",
234
+ " <td>Wyoming</td>\n",
235
+ " <td>state</td>\n",
236
+ " <td>nan</td>\n",
237
+ " <td>All Bedrooms</td>\n",
238
+ " <td>condo/co-op</td>\n",
239
+ " <td>2023-10-31</td>\n",
240
+ " <td>485847.539614</td>\n",
241
+ " <td>NaN</td>\n",
242
+ " <td>NaN</td>\n",
243
+ " </tr>\n",
244
+ " <tr>\n",
245
+ " <th>117909</th>\n",
246
+ " <td>62</td>\n",
247
+ " <td>51</td>\n",
248
+ " <td>Wyoming</td>\n",
249
+ " <td>state</td>\n",
250
+ " <td>nan</td>\n",
251
+ " <td>All Bedrooms</td>\n",
252
+ " <td>condo/co-op</td>\n",
253
+ " <td>2023-11-30</td>\n",
254
+ " <td>484223.885775</td>\n",
255
+ " <td>NaN</td>\n",
256
+ " <td>NaN</td>\n",
257
+ " </tr>\n",
258
+ " <tr>\n",
259
+ " <th>117910</th>\n",
260
+ " <td>62</td>\n",
261
+ " <td>51</td>\n",
262
+ " <td>Wyoming</td>\n",
263
+ " <td>state</td>\n",
264
+ " <td>nan</td>\n",
265
+ " <td>All Bedrooms</td>\n",
266
+ " <td>condo/co-op</td>\n",
267
+ " <td>2023-12-31</td>\n",
268
+ " <td>481522.403338</td>\n",
269
+ " <td>NaN</td>\n",
270
+ " <td>NaN</td>\n",
271
+ " </tr>\n",
272
+ " <tr>\n",
273
+ " <th>117911</th>\n",
274
+ " <td>62</td>\n",
275
+ " <td>51</td>\n",
276
+ " <td>Wyoming</td>\n",
277
+ " <td>state</td>\n",
278
+ " <td>nan</td>\n",
279
+ " <td>All Bedrooms</td>\n",
280
+ " <td>condo/co-op</td>\n",
281
+ " <td>2024-01-31</td>\n",
282
+ " <td>481181.718200</td>\n",
283
+ " <td>NaN</td>\n",
284
+ " <td>NaN</td>\n",
285
+ " </tr>\n",
286
+ " </tbody>\n",
287
+ "</table>\n",
288
+ "<p>117912 rows Γ— 11 columns</p>\n",
289
+ "</div>"
290
+ ],
291
+ "text/plain": [
292
+ " RegionID SizeRank RegionName RegionType StateName Bedroom Count \\\n",
293
+ "0 3 48 Alaska state nan 1-Bedroom \n",
294
+ "1 3 48 Alaska state nan 1-Bedroom \n",
295
+ "2 3 48 Alaska state nan 1-Bedroom \n",
296
+ "3 3 48 Alaska state nan 1-Bedroom \n",
297
+ "4 3 48 Alaska state nan 1-Bedroom \n",
298
+ "... ... ... ... ... ... ... \n",
299
+ "117907 62 51 Wyoming state nan All Bedrooms \n",
300
+ "117908 62 51 Wyoming state nan All Bedrooms \n",
301
+ "117909 62 51 Wyoming state nan All Bedrooms \n",
302
+ "117910 62 51 Wyoming state nan All Bedrooms \n",
303
+ "117911 62 51 Wyoming state nan All Bedrooms \n",
304
+ "\n",
305
+ " Home Type Date \\\n",
306
+ "0 all homes 2000-01-31 \n",
307
+ "1 all homes 2000-02-29 \n",
308
+ "2 all homes 2000-03-31 \n",
309
+ "3 all homes 2000-04-30 \n",
310
+ "4 all homes 2000-05-31 \n",
311
+ "... ... ... \n",
312
+ "117907 condo/co-op 2023-09-30 \n",
313
+ "117908 condo/co-op 2023-10-31 \n",
314
+ "117909 condo/co-op 2023-11-30 \n",
315
+ "117910 condo/co-op 2023-12-31 \n",
316
+ "117911 condo/co-op 2024-01-31 \n",
317
+ "\n",
318
+ " Mid Tier ZHVI (Smoothed) (Seasonally Adjusted) \\\n",
319
+ "0 81310.639504 \n",
320
+ "1 80419.761984 \n",
321
+ "2 80480.449461 \n",
322
+ "3 79799.206525 \n",
323
+ "4 79666.469861 \n",
324
+ "... ... \n",
325
+ "117907 486974.735908 \n",
326
+ "117908 485847.539614 \n",
327
+ "117909 484223.885775 \n",
328
+ "117910 481522.403338 \n",
329
+ "117911 481181.718200 \n",
330
+ "\n",
331
+ " Bottom Tier ZHVI (Smoothed) (Seasonally Adjusted) \\\n",
332
+ "0 NaN \n",
333
+ "1 NaN \n",
334
+ "2 NaN \n",
335
+ "3 NaN \n",
336
+ "4 NaN \n",
337
+ "... ... \n",
338
+ "117907 NaN \n",
339
+ "117908 NaN \n",
340
+ "117909 NaN \n",
341
+ "117910 NaN \n",
342
+ "117911 NaN \n",
343
+ "\n",
344
+ " Top Tier ZHVI (Smoothed) (Seasonally Adjusted) \n",
345
+ "0 NaN \n",
346
+ "1 NaN \n",
347
+ "2 NaN \n",
348
+ "3 NaN \n",
349
+ "4 NaN \n",
350
+ "... ... \n",
351
+ "117907 NaN \n",
352
+ "117908 NaN \n",
353
+ "117909 NaN \n",
354
+ "117910 NaN \n",
355
+ "117911 NaN \n",
356
+ "\n",
357
+ "[117912 rows x 11 columns]"
358
+ ]
359
+ },
360
+ "execution_count": 3,
361
+ "metadata": {},
362
+ "output_type": "execute_result"
363
+ }
364
+ ],
365
+ "source": [
366
+ "data_frames = []\n",
367
+ "\n",
368
+ "slug_column_mappings = {\n",
369
+ " \"_tier_0.0_0.33_\": \"Bottom Tier ZHVI\",\n",
370
+ " \"_tier_0.33_0.67_\": \"Mid Tier ZHVI\",\n",
371
+ " \"_tier_0.67_1.0_\": \"Top Tier ZHVI\",\n",
372
+ " \"\": \"ZHVI\",\n",
373
+ "}\n",
374
+ "\n",
375
+ "data_dir_path = get_data_path_for_config(CONFIG_NAME)\n",
376
+ "\n",
377
+ "for filename in os.listdir(data_dir_path):\n",
378
+ " if filename.endswith(\".csv\"):\n",
379
+ " print(\"processing \" + filename)\n",
380
+ " cur_df = pd.read_csv(os.path.join(data_dir_path, filename))\n",
381
+ " exclude_columns = [\n",
382
+ " \"RegionID\",\n",
383
+ " \"SizeRank\",\n",
384
+ " \"RegionName\",\n",
385
+ " \"RegionType\",\n",
386
+ " \"StateName\",\n",
387
+ " \"Bedroom Count\",\n",
388
+ " \"Home Type\",\n",
389
+ " ]\n",
390
+ "\n",
391
+ " if \"Zip\" in filename:\n",
392
+ " continue\n",
393
+ " if \"Neighborhood\" in filename:\n",
394
+ " continue\n",
395
+ " if \"City\" in filename:\n",
396
+ " continue\n",
397
+ " if \"Metro\" in filename:\n",
398
+ " continue\n",
399
+ " if \"County\" in filename:\n",
400
+ " continue\n",
401
+ "\n",
402
+ " if \"City\" in filename:\n",
403
+ " exclude_columns = exclude_columns + [\"State\", \"Metro\", \"CountyName\"]\n",
404
+ " elif \"Zip\" in filename:\n",
405
+ " exclude_columns = exclude_columns + [\n",
406
+ " \"State\",\n",
407
+ " \"City\",\n",
408
+ " \"Metro\",\n",
409
+ " \"CountyName\",\n",
410
+ " ]\n",
411
+ " elif \"County\" in filename:\n",
412
+ " exclude_columns = exclude_columns + [\n",
413
+ " \"State\",\n",
414
+ " \"Metro\",\n",
415
+ " \"StateCodeFIPS\",\n",
416
+ " \"MunicipalCodeFIPS\",\n",
417
+ " ]\n",
418
+ " elif \"Neighborhood\" in filename:\n",
419
+ " exclude_columns = exclude_columns + [\n",
420
+ " \"State\",\n",
421
+ " \"City\",\n",
422
+ " \"Metro\",\n",
423
+ " \"CountyName\",\n",
424
+ " ]\n",
425
+ "\n",
426
+ " if \"_bdrmcnt_1_\" in filename:\n",
427
+ " cur_df[\"Bedroom Count\"] = \"1-Bedroom\"\n",
428
+ " elif \"_bdrmcnt_2_\" in filename:\n",
429
+ " cur_df[\"Bedroom Count\"] = \"2-Bedrooms\"\n",
430
+ " elif \"_bdrmcnt_3_\" in filename:\n",
431
+ " cur_df[\"Bedroom Count\"] = \"3-Bedrooms\"\n",
432
+ " elif \"_bdrmcnt_4_\" in filename:\n",
433
+ " cur_df[\"Bedroom Count\"] = \"4-Bedrooms\"\n",
434
+ " elif \"_bdrmcnt_5_\" in filename:\n",
435
+ " cur_df[\"Bedroom Count\"] = \"5+-Bedrooms\"\n",
436
+ " else:\n",
437
+ " cur_df[\"Bedroom Count\"] = \"All Bedrooms\"\n",
438
+ "\n",
439
+ " cur_df = set_home_type(cur_df, filename)\n",
440
+ "\n",
441
+ " cur_df[\"StateName\"] = cur_df[\"StateName\"].astype(str)\n",
442
+ " cur_df[\"RegionName\"] = cur_df[\"RegionName\"].astype(str)\n",
443
+ "\n",
444
+ " data_frames = handle_slug_column_mappings(\n",
445
+ " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
446
+ " )\n",
447
+ "\n",
448
+ "\n",
449
+ "combined_df = get_combined_df(\n",
450
+ " data_frames,\n",
451
+ " [\n",
452
+ " \"RegionID\",\n",
453
+ " \"SizeRank\",\n",
454
+ " \"RegionName\",\n",
455
+ " \"RegionType\",\n",
456
+ " \"StateName\",\n",
457
+ " \"Bedroom Count\",\n",
458
+ " \"Home Type\",\n",
459
+ " \"Date\",\n",
460
+ " ],\n",
461
+ ")\n",
462
+ "\n",
463
+ "combined_df"
464
+ ]
465
+ },
466
+ {
467
+ "cell_type": "code",
468
+ "execution_count": 4,
469
+ "metadata": {},
470
+ "outputs": [
471
+ {
472
+ "data": {
473
+ "text/html": [
474
+ "<div>\n",
475
+ "<style scoped>\n",
476
+ " .dataframe tbody tr th:only-of-type {\n",
477
+ " vertical-align: middle;\n",
478
+ " }\n",
479
+ "\n",
480
+ " .dataframe tbody tr th {\n",
481
+ " vertical-align: top;\n",
482
+ " }\n",
483
+ "\n",
484
+ " .dataframe thead th {\n",
485
+ " text-align: right;\n",
486
+ " }\n",
487
+ "</style>\n",
488
+ "<table border=\"1\" class=\"dataframe\">\n",
489
+ " <thead>\n",
490
+ " <tr style=\"text-align: right;\">\n",
491
+ " <th></th>\n",
492
+ " <th>RegionID</th>\n",
493
+ " <th>SizeRank</th>\n",
494
+ " <th>RegionName</th>\n",
495
+ " <th>RegionType</th>\n",
496
+ " <th>StateName</th>\n",
497
+ " <th>Bedroom Count</th>\n",
498
+ " <th>Home Type</th>\n",
499
+ " <th>Date</th>\n",
500
+ " <th>Mid Tier ZHVI (Smoothed) (Seasonally Adjusted)</th>\n",
501
+ " <th>Bottom Tier ZHVI (Smoothed) (Seasonally Adjusted)</th>\n",
502
+ " <th>Top Tier ZHVI (Smoothed) (Seasonally Adjusted)</th>\n",
503
+ " </tr>\n",
504
+ " </thead>\n",
505
+ " <tbody>\n",
506
+ " <tr>\n",
507
+ " <th>0</th>\n",
508
+ " <td>3</td>\n",
509
+ " <td>48</td>\n",
510
+ " <td>Alaska</td>\n",
511
+ " <td>state</td>\n",
512
+ " <td>Alaska</td>\n",
513
+ " <td>1-Bedroom</td>\n",
514
+ " <td>all homes</td>\n",
515
+ " <td>2000-01-31</td>\n",
516
+ " <td>81310.639504</td>\n",
517
+ " <td>NaN</td>\n",
518
+ " <td>NaN</td>\n",
519
+ " </tr>\n",
520
+ " <tr>\n",
521
+ " <th>1</th>\n",
522
+ " <td>3</td>\n",
523
+ " <td>48</td>\n",
524
+ " <td>Alaska</td>\n",
525
+ " <td>state</td>\n",
526
+ " <td>Alaska</td>\n",
527
+ " <td>1-Bedroom</td>\n",
528
+ " <td>all homes</td>\n",
529
+ " <td>2000-02-29</td>\n",
530
+ " <td>80419.761984</td>\n",
531
+ " <td>NaN</td>\n",
532
+ " <td>NaN</td>\n",
533
+ " </tr>\n",
534
+ " <tr>\n",
535
+ " <th>2</th>\n",
536
+ " <td>3</td>\n",
537
+ " <td>48</td>\n",
538
+ " <td>Alaska</td>\n",
539
+ " <td>state</td>\n",
540
+ " <td>Alaska</td>\n",
541
+ " <td>1-Bedroom</td>\n",
542
+ " <td>all homes</td>\n",
543
+ " <td>2000-03-31</td>\n",
544
+ " <td>80480.449461</td>\n",
545
+ " <td>NaN</td>\n",
546
+ " <td>NaN</td>\n",
547
+ " </tr>\n",
548
+ " <tr>\n",
549
+ " <th>3</th>\n",
550
+ " <td>3</td>\n",
551
+ " <td>48</td>\n",
552
+ " <td>Alaska</td>\n",
553
+ " <td>state</td>\n",
554
+ " <td>Alaska</td>\n",
555
+ " <td>1-Bedroom</td>\n",
556
+ " <td>all homes</td>\n",
557
+ " <td>2000-04-30</td>\n",
558
+ " <td>79799.206525</td>\n",
559
+ " <td>NaN</td>\n",
560
+ " <td>NaN</td>\n",
561
+ " </tr>\n",
562
+ " <tr>\n",
563
+ " <th>4</th>\n",
564
+ " <td>3</td>\n",
565
+ " <td>48</td>\n",
566
+ " <td>Alaska</td>\n",
567
+ " <td>state</td>\n",
568
+ " <td>Alaska</td>\n",
569
+ " <td>1-Bedroom</td>\n",
570
+ " <td>all homes</td>\n",
571
+ " <td>2000-05-31</td>\n",
572
+ " <td>79666.469861</td>\n",
573
+ " <td>NaN</td>\n",
574
+ " <td>NaN</td>\n",
575
+ " </tr>\n",
576
+ " <tr>\n",
577
+ " <th>...</th>\n",
578
+ " <td>...</td>\n",
579
+ " <td>...</td>\n",
580
+ " <td>...</td>\n",
581
+ " <td>...</td>\n",
582
+ " <td>...</td>\n",
583
+ " <td>...</td>\n",
584
+ " <td>...</td>\n",
585
+ " <td>...</td>\n",
586
+ " <td>...</td>\n",
587
+ " <td>...</td>\n",
588
+ " <td>...</td>\n",
589
+ " </tr>\n",
590
+ " <tr>\n",
591
+ " <th>117907</th>\n",
592
+ " <td>62</td>\n",
593
+ " <td>51</td>\n",
594
+ " <td>Wyoming</td>\n",
595
+ " <td>state</td>\n",
596
+ " <td>Wyoming</td>\n",
597
+ " <td>All Bedrooms</td>\n",
598
+ " <td>condo/co-op</td>\n",
599
+ " <td>2023-09-30</td>\n",
600
+ " <td>486974.735908</td>\n",
601
+ " <td>NaN</td>\n",
602
+ " <td>NaN</td>\n",
603
+ " </tr>\n",
604
+ " <tr>\n",
605
+ " <th>117908</th>\n",
606
+ " <td>62</td>\n",
607
+ " <td>51</td>\n",
608
+ " <td>Wyoming</td>\n",
609
+ " <td>state</td>\n",
610
+ " <td>Wyoming</td>\n",
611
+ " <td>All Bedrooms</td>\n",
612
+ " <td>condo/co-op</td>\n",
613
+ " <td>2023-10-31</td>\n",
614
+ " <td>485847.539614</td>\n",
615
+ " <td>NaN</td>\n",
616
+ " <td>NaN</td>\n",
617
+ " </tr>\n",
618
+ " <tr>\n",
619
+ " <th>117909</th>\n",
620
+ " <td>62</td>\n",
621
+ " <td>51</td>\n",
622
+ " <td>Wyoming</td>\n",
623
+ " <td>state</td>\n",
624
+ " <td>Wyoming</td>\n",
625
+ " <td>All Bedrooms</td>\n",
626
+ " <td>condo/co-op</td>\n",
627
+ " <td>2023-11-30</td>\n",
628
+ " <td>484223.885775</td>\n",
629
+ " <td>NaN</td>\n",
630
+ " <td>NaN</td>\n",
631
+ " </tr>\n",
632
+ " <tr>\n",
633
+ " <th>117910</th>\n",
634
+ " <td>62</td>\n",
635
+ " <td>51</td>\n",
636
+ " <td>Wyoming</td>\n",
637
+ " <td>state</td>\n",
638
+ " <td>Wyoming</td>\n",
639
+ " <td>All Bedrooms</td>\n",
640
+ " <td>condo/co-op</td>\n",
641
+ " <td>2023-12-31</td>\n",
642
+ " <td>481522.403338</td>\n",
643
+ " <td>NaN</td>\n",
644
+ " <td>NaN</td>\n",
645
+ " </tr>\n",
646
+ " <tr>\n",
647
+ " <th>117911</th>\n",
648
+ " <td>62</td>\n",
649
+ " <td>51</td>\n",
650
+ " <td>Wyoming</td>\n",
651
+ " <td>state</td>\n",
652
+ " <td>Wyoming</td>\n",
653
+ " <td>All Bedrooms</td>\n",
654
+ " <td>condo/co-op</td>\n",
655
+ " <td>2024-01-31</td>\n",
656
+ " <td>481181.718200</td>\n",
657
+ " <td>NaN</td>\n",
658
+ " <td>NaN</td>\n",
659
+ " </tr>\n",
660
+ " </tbody>\n",
661
+ "</table>\n",
662
+ "<p>117912 rows Γ— 11 columns</p>\n",
663
+ "</div>"
664
+ ],
665
+ "text/plain": [
666
+ " RegionID SizeRank RegionName RegionType StateName Bedroom Count \\\n",
667
+ "0 3 48 Alaska state Alaska 1-Bedroom \n",
668
+ "1 3 48 Alaska state Alaska 1-Bedroom \n",
669
+ "2 3 48 Alaska state Alaska 1-Bedroom \n",
670
+ "3 3 48 Alaska state Alaska 1-Bedroom \n",
671
+ "4 3 48 Alaska state Alaska 1-Bedroom \n",
672
+ "... ... ... ... ... ... ... \n",
673
+ "117907 62 51 Wyoming state Wyoming All Bedrooms \n",
674
+ "117908 62 51 Wyoming state Wyoming All Bedrooms \n",
675
+ "117909 62 51 Wyoming state Wyoming All Bedrooms \n",
676
+ "117910 62 51 Wyoming state Wyoming All Bedrooms \n",
677
+ "117911 62 51 Wyoming state Wyoming All Bedrooms \n",
678
+ "\n",
679
+ " Home Type Date \\\n",
680
+ "0 all homes 2000-01-31 \n",
681
+ "1 all homes 2000-02-29 \n",
682
+ "2 all homes 2000-03-31 \n",
683
+ "3 all homes 2000-04-30 \n",
684
+ "4 all homes 2000-05-31 \n",
685
+ "... ... ... \n",
686
+ "117907 condo/co-op 2023-09-30 \n",
687
+ "117908 condo/co-op 2023-10-31 \n",
688
+ "117909 condo/co-op 2023-11-30 \n",
689
+ "117910 condo/co-op 2023-12-31 \n",
690
+ "117911 condo/co-op 2024-01-31 \n",
691
+ "\n",
692
+ " Mid Tier ZHVI (Smoothed) (Seasonally Adjusted) \\\n",
693
+ "0 81310.639504 \n",
694
+ "1 80419.761984 \n",
695
+ "2 80480.449461 \n",
696
+ "3 79799.206525 \n",
697
+ "4 79666.469861 \n",
698
+ "... ... \n",
699
+ "117907 486974.735908 \n",
700
+ "117908 485847.539614 \n",
701
+ "117909 484223.885775 \n",
702
+ "117910 481522.403338 \n",
703
+ "117911 481181.718200 \n",
704
+ "\n",
705
+ " Bottom Tier ZHVI (Smoothed) (Seasonally Adjusted) \\\n",
706
+ "0 NaN \n",
707
+ "1 NaN \n",
708
+ "2 NaN \n",
709
+ "3 NaN \n",
710
+ "4 NaN \n",
711
+ "... ... \n",
712
+ "117907 NaN \n",
713
+ "117908 NaN \n",
714
+ "117909 NaN \n",
715
+ "117910 NaN \n",
716
+ "117911 NaN \n",
717
+ "\n",
718
+ " Top Tier ZHVI (Smoothed) (Seasonally Adjusted) \n",
719
+ "0 NaN \n",
720
+ "1 NaN \n",
721
+ "2 NaN \n",
722
+ "3 NaN \n",
723
+ "4 NaN \n",
724
+ "... ... \n",
725
+ "117907 NaN \n",
726
+ "117908 NaN \n",
727
+ "117909 NaN \n",
728
+ "117910 NaN \n",
729
+ "117911 NaN \n",
730
+ "\n",
731
+ "[117912 rows x 11 columns]"
732
+ ]
733
+ },
734
+ "execution_count": 4,
735
+ "metadata": {},
736
+ "output_type": "execute_result"
737
+ }
738
+ ],
739
+ "source": [
740
+ "final_df = combined_df\n",
741
+ "\n",
742
+ "for index, row in final_df.iterrows():\n",
743
+ " if row[\"RegionType\"] == \"city\":\n",
744
+ " final_df.at[index, \"City\"] = row[\"RegionName\"]\n",
745
+ " elif row[\"RegionType\"] == \"county\":\n",
746
+ " final_df.at[index, \"County\"] = row[\"RegionName\"]\n",
747
+ " if row[\"RegionType\"] == \"state\":\n",
748
+ " final_df.at[index, \"StateName\"] = row[\"RegionName\"]\n",
749
+ "\n",
750
+ "# coalesce State and StateName columns\n",
751
+ "# final_df[\"State\"] = final_df[\"State\"].combine_first(final_df[\"StateName\"])\n",
752
+ "# final_df[\"County\"] = final_df[\"County\"].combine_first(final_df[\"CountyName\"])\n",
753
+ "\n",
754
+ "# final_df = final_df.drop(\n",
755
+ "# columns=[\n",
756
+ "# \"StateName\",\n",
757
+ "# # \"CountyName\"\n",
758
+ "# ]\n",
759
+ "# )\n",
760
+ "final_df"
761
+ ]
762
+ },
763
+ {
764
+ "cell_type": "code",
765
+ "execution_count": 5,
766
+ "metadata": {},
767
+ "outputs": [
768
+ {
769
+ "data": {
770
+ "text/html": [
771
+ "<div>\n",
772
+ "<style scoped>\n",
773
+ " .dataframe tbody tr th:only-of-type {\n",
774
+ " vertical-align: middle;\n",
775
+ " }\n",
776
+ "\n",
777
+ " .dataframe tbody tr th {\n",
778
+ " vertical-align: top;\n",
779
+ " }\n",
780
+ "\n",
781
+ " .dataframe thead th {\n",
782
+ " text-align: right;\n",
783
+ " }\n",
784
+ "</style>\n",
785
+ "<table border=\"1\" class=\"dataframe\">\n",
786
+ " <thead>\n",
787
+ " <tr style=\"text-align: right;\">\n",
788
+ " <th></th>\n",
789
+ " <th>Region ID</th>\n",
790
+ " <th>Size Rank</th>\n",
791
+ " <th>Region</th>\n",
792
+ " <th>Region Type</th>\n",
793
+ " <th>State</th>\n",
794
+ " <th>Bedroom Count</th>\n",
795
+ " <th>Home Type</th>\n",
796
+ " <th>Date</th>\n",
797
+ " <th>Mid Tier ZHVI (Smoothed) (Seasonally Adjusted)</th>\n",
798
+ " <th>Bottom Tier ZHVI (Smoothed) (Seasonally Adjusted)</th>\n",
799
+ " <th>Top Tier ZHVI (Smoothed) (Seasonally Adjusted)</th>\n",
800
+ " </tr>\n",
801
+ " </thead>\n",
802
+ " <tbody>\n",
803
+ " <tr>\n",
804
+ " <th>0</th>\n",
805
+ " <td>3</td>\n",
806
+ " <td>48</td>\n",
807
+ " <td>Alaska</td>\n",
808
+ " <td>state</td>\n",
809
+ " <td>Alaska</td>\n",
810
+ " <td>1-Bedroom</td>\n",
811
+ " <td>all homes</td>\n",
812
+ " <td>2000-01-31</td>\n",
813
+ " <td>81310.639504</td>\n",
814
+ " <td>NaN</td>\n",
815
+ " <td>NaN</td>\n",
816
+ " </tr>\n",
817
+ " <tr>\n",
818
+ " <th>1</th>\n",
819
+ " <td>3</td>\n",
820
+ " <td>48</td>\n",
821
+ " <td>Alaska</td>\n",
822
+ " <td>state</td>\n",
823
+ " <td>Alaska</td>\n",
824
+ " <td>1-Bedroom</td>\n",
825
+ " <td>all homes</td>\n",
826
+ " <td>2000-02-29</td>\n",
827
+ " <td>80419.761984</td>\n",
828
+ " <td>NaN</td>\n",
829
+ " <td>NaN</td>\n",
830
+ " </tr>\n",
831
+ " <tr>\n",
832
+ " <th>2</th>\n",
833
+ " <td>3</td>\n",
834
+ " <td>48</td>\n",
835
+ " <td>Alaska</td>\n",
836
+ " <td>state</td>\n",
837
+ " <td>Alaska</td>\n",
838
+ " <td>1-Bedroom</td>\n",
839
+ " <td>all homes</td>\n",
840
+ " <td>2000-03-31</td>\n",
841
+ " <td>80480.449461</td>\n",
842
+ " <td>NaN</td>\n",
843
+ " <td>NaN</td>\n",
844
+ " </tr>\n",
845
+ " <tr>\n",
846
+ " <th>3</th>\n",
847
+ " <td>3</td>\n",
848
+ " <td>48</td>\n",
849
+ " <td>Alaska</td>\n",
850
+ " <td>state</td>\n",
851
+ " <td>Alaska</td>\n",
852
+ " <td>1-Bedroom</td>\n",
853
+ " <td>all homes</td>\n",
854
+ " <td>2000-04-30</td>\n",
855
+ " <td>79799.206525</td>\n",
856
+ " <td>NaN</td>\n",
857
+ " <td>NaN</td>\n",
858
+ " </tr>\n",
859
+ " <tr>\n",
860
+ " <th>4</th>\n",
861
+ " <td>3</td>\n",
862
+ " <td>48</td>\n",
863
+ " <td>Alaska</td>\n",
864
+ " <td>state</td>\n",
865
+ " <td>Alaska</td>\n",
866
+ " <td>1-Bedroom</td>\n",
867
+ " <td>all homes</td>\n",
868
+ " <td>2000-05-31</td>\n",
869
+ " <td>79666.469861</td>\n",
870
+ " <td>NaN</td>\n",
871
+ " <td>NaN</td>\n",
872
+ " </tr>\n",
873
+ " <tr>\n",
874
+ " <th>...</th>\n",
875
+ " <td>...</td>\n",
876
+ " <td>...</td>\n",
877
+ " <td>...</td>\n",
878
+ " <td>...</td>\n",
879
+ " <td>...</td>\n",
880
+ " <td>...</td>\n",
881
+ " <td>...</td>\n",
882
+ " <td>...</td>\n",
883
+ " <td>...</td>\n",
884
+ " <td>...</td>\n",
885
+ " <td>...</td>\n",
886
+ " </tr>\n",
887
+ " <tr>\n",
888
+ " <th>117907</th>\n",
889
+ " <td>62</td>\n",
890
+ " <td>51</td>\n",
891
+ " <td>Wyoming</td>\n",
892
+ " <td>state</td>\n",
893
+ " <td>Wyoming</td>\n",
894
+ " <td>All Bedrooms</td>\n",
895
+ " <td>condo/co-op</td>\n",
896
+ " <td>2023-09-30</td>\n",
897
+ " <td>486974.735908</td>\n",
898
+ " <td>NaN</td>\n",
899
+ " <td>NaN</td>\n",
900
+ " </tr>\n",
901
+ " <tr>\n",
902
+ " <th>117908</th>\n",
903
+ " <td>62</td>\n",
904
+ " <td>51</td>\n",
905
+ " <td>Wyoming</td>\n",
906
+ " <td>state</td>\n",
907
+ " <td>Wyoming</td>\n",
908
+ " <td>All Bedrooms</td>\n",
909
+ " <td>condo/co-op</td>\n",
910
+ " <td>2023-10-31</td>\n",
911
+ " <td>485847.539614</td>\n",
912
+ " <td>NaN</td>\n",
913
+ " <td>NaN</td>\n",
914
+ " </tr>\n",
915
+ " <tr>\n",
916
+ " <th>117909</th>\n",
917
+ " <td>62</td>\n",
918
+ " <td>51</td>\n",
919
+ " <td>Wyoming</td>\n",
920
+ " <td>state</td>\n",
921
+ " <td>Wyoming</td>\n",
922
+ " <td>All Bedrooms</td>\n",
923
+ " <td>condo/co-op</td>\n",
924
+ " <td>2023-11-30</td>\n",
925
+ " <td>484223.885775</td>\n",
926
+ " <td>NaN</td>\n",
927
+ " <td>NaN</td>\n",
928
+ " </tr>\n",
929
+ " <tr>\n",
930
+ " <th>117910</th>\n",
931
+ " <td>62</td>\n",
932
+ " <td>51</td>\n",
933
+ " <td>Wyoming</td>\n",
934
+ " <td>state</td>\n",
935
+ " <td>Wyoming</td>\n",
936
+ " <td>All Bedrooms</td>\n",
937
+ " <td>condo/co-op</td>\n",
938
+ " <td>2023-12-31</td>\n",
939
+ " <td>481522.403338</td>\n",
940
+ " <td>NaN</td>\n",
941
+ " <td>NaN</td>\n",
942
+ " </tr>\n",
943
+ " <tr>\n",
944
+ " <th>117911</th>\n",
945
+ " <td>62</td>\n",
946
+ " <td>51</td>\n",
947
+ " <td>Wyoming</td>\n",
948
+ " <td>state</td>\n",
949
+ " <td>Wyoming</td>\n",
950
+ " <td>All Bedrooms</td>\n",
951
+ " <td>condo/co-op</td>\n",
952
+ " <td>2024-01-31</td>\n",
953
+ " <td>481181.718200</td>\n",
954
+ " <td>NaN</td>\n",
955
+ " <td>NaN</td>\n",
956
+ " </tr>\n",
957
+ " </tbody>\n",
958
+ "</table>\n",
959
+ "<p>117912 rows Γ— 11 columns</p>\n",
960
+ "</div>"
961
+ ],
962
+ "text/plain": [
963
+ " Region ID Size Rank Region Region Type State Bedroom Count \\\n",
964
+ "0 3 48 Alaska state Alaska 1-Bedroom \n",
965
+ "1 3 48 Alaska state Alaska 1-Bedroom \n",
966
+ "2 3 48 Alaska state Alaska 1-Bedroom \n",
967
+ "3 3 48 Alaska state Alaska 1-Bedroom \n",
968
+ "4 3 48 Alaska state Alaska 1-Bedroom \n",
969
+ "... ... ... ... ... ... ... \n",
970
+ "117907 62 51 Wyoming state Wyoming All Bedrooms \n",
971
+ "117908 62 51 Wyoming state Wyoming All Bedrooms \n",
972
+ "117909 62 51 Wyoming state Wyoming All Bedrooms \n",
973
+ "117910 62 51 Wyoming state Wyoming All Bedrooms \n",
974
+ "117911 62 51 Wyoming state Wyoming All Bedrooms \n",
975
+ "\n",
976
+ " Home Type Date \\\n",
977
+ "0 all homes 2000-01-31 \n",
978
+ "1 all homes 2000-02-29 \n",
979
+ "2 all homes 2000-03-31 \n",
980
+ "3 all homes 2000-04-30 \n",
981
+ "4 all homes 2000-05-31 \n",
982
+ "... ... ... \n",
983
+ "117907 condo/co-op 2023-09-30 \n",
984
+ "117908 condo/co-op 2023-10-31 \n",
985
+ "117909 condo/co-op 2023-11-30 \n",
986
+ "117910 condo/co-op 2023-12-31 \n",
987
+ "117911 condo/co-op 2024-01-31 \n",
988
+ "\n",
989
+ " Mid Tier ZHVI (Smoothed) (Seasonally Adjusted) \\\n",
990
+ "0 81310.639504 \n",
991
+ "1 80419.761984 \n",
992
+ "2 80480.449461 \n",
993
+ "3 79799.206525 \n",
994
+ "4 79666.469861 \n",
995
+ "... ... \n",
996
+ "117907 486974.735908 \n",
997
+ "117908 485847.539614 \n",
998
+ "117909 484223.885775 \n",
999
+ "117910 481522.403338 \n",
1000
+ "117911 481181.718200 \n",
1001
+ "\n",
1002
+ " Bottom Tier ZHVI (Smoothed) (Seasonally Adjusted) \\\n",
1003
+ "0 NaN \n",
1004
+ "1 NaN \n",
1005
+ "2 NaN \n",
1006
+ "3 NaN \n",
1007
+ "4 NaN \n",
1008
+ "... ... \n",
1009
+ "117907 NaN \n",
1010
+ "117908 NaN \n",
1011
+ "117909 NaN \n",
1012
+ "117910 NaN \n",
1013
+ "117911 NaN \n",
1014
+ "\n",
1015
+ " Top Tier ZHVI (Smoothed) (Seasonally Adjusted) \n",
1016
+ "0 NaN \n",
1017
+ "1 NaN \n",
1018
+ "2 NaN \n",
1019
+ "3 NaN \n",
1020
+ "4 NaN \n",
1021
+ "... ... \n",
1022
+ "117907 NaN \n",
1023
+ "117908 NaN \n",
1024
+ "117909 NaN \n",
1025
+ "117910 NaN \n",
1026
+ "117911 NaN \n",
1027
+ "\n",
1028
+ "[117912 rows x 11 columns]"
1029
+ ]
1030
+ },
1031
+ "execution_count": 5,
1032
+ "metadata": {},
1033
+ "output_type": "execute_result"
1034
+ }
1035
+ ],
1036
+ "source": [
1037
+ "final_df = final_df.rename(\n",
1038
+ " columns={\n",
1039
+ " \"RegionID\": \"Region ID\",\n",
1040
+ " \"SizeRank\": \"Size Rank\",\n",
1041
+ " \"RegionName\": \"Region\",\n",
1042
+ " \"RegionType\": \"Region Type\",\n",
1043
+ " \"StateCodeFIPS\": \"State Code FIPS\",\n",
1044
+ " \"StateName\": \"State\",\n",
1045
+ " \"MunicipalCodeFIPS\": \"Municipal Code FIPS\",\n",
1046
+ " }\n",
1047
+ ")\n",
1048
+ "\n",
1049
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
1050
+ "\n",
1051
+ "final_df"
1052
+ ]
1053
+ },
1054
+ {
1055
+ "cell_type": "code",
1056
+ "execution_count": 6,
1057
+ "metadata": {},
1058
+ "outputs": [],
1059
+ "source": [
1060
+ "save_final_df_as_jsonl(CONFIG_NAME, final_df)"
1061
+ ]
1062
+ }
1063
+ ],
1064
+ "metadata": {
1065
+ "kernelspec": {
1066
+ "display_name": "Python 3",
1067
+ "language": "python",
1068
+ "name": "python3"
1069
+ },
1070
+ "language_info": {
1071
+ "codemirror_mode": {
1072
+ "name": "ipython",
1073
+ "version": 3
1074
+ },
1075
+ "file_extension": ".py",
1076
+ "mimetype": "text/x-python",
1077
+ "name": "python",
1078
+ "nbconvert_exporter": "python",
1079
+ "pygments_lexer": "ipython3",
1080
+ "version": "3.12.2"
1081
+ }
1082
+ },
1083
+ "nbformat": 4,
1084
+ "nbformat_minor": 2
1085
+ }
processors/home_values.py ADDED
@@ -0,0 +1,178 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[1]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import (
11
+ get_data_path_for_config,
12
+ get_combined_df,
13
+ save_final_df_as_jsonl,
14
+ handle_slug_column_mappings,
15
+ set_home_type,
16
+ )
17
+
18
+
19
+ # In[2]:
20
+
21
+
22
+ CONFIG_NAME = "home_values"
23
+
24
+
25
+ # In[3]:
26
+
27
+
28
+ data_frames = []
29
+
30
+ slug_column_mappings = {
31
+ "_tier_0.0_0.33_": "Bottom Tier ZHVI",
32
+ "_tier_0.33_0.67_": "Mid Tier ZHVI",
33
+ "_tier_0.67_1.0_": "Top Tier ZHVI",
34
+ "": "ZHVI",
35
+ }
36
+
37
+ data_dir_path = get_data_path_for_config(CONFIG_NAME)
38
+
39
+ for filename in os.listdir(data_dir_path):
40
+ if filename.endswith(".csv"):
41
+ print("processing " + filename)
42
+ cur_df = pd.read_csv(os.path.join(data_dir_path, filename))
43
+ exclude_columns = [
44
+ "RegionID",
45
+ "SizeRank",
46
+ "RegionName",
47
+ "RegionType",
48
+ "StateName",
49
+ "Bedroom Count",
50
+ "Home Type",
51
+ ]
52
+
53
+ if "Zip" in filename:
54
+ continue
55
+ if "Neighborhood" in filename:
56
+ continue
57
+ if "City" in filename:
58
+ continue
59
+ if "Metro" in filename:
60
+ continue
61
+ if "County" in filename:
62
+ continue
63
+
64
+ if "City" in filename:
65
+ exclude_columns = exclude_columns + ["State", "Metro", "CountyName"]
66
+ elif "Zip" in filename:
67
+ exclude_columns = exclude_columns + [
68
+ "State",
69
+ "City",
70
+ "Metro",
71
+ "CountyName",
72
+ ]
73
+ elif "County" in filename:
74
+ exclude_columns = exclude_columns + [
75
+ "State",
76
+ "Metro",
77
+ "StateCodeFIPS",
78
+ "MunicipalCodeFIPS",
79
+ ]
80
+ elif "Neighborhood" in filename:
81
+ exclude_columns = exclude_columns + [
82
+ "State",
83
+ "City",
84
+ "Metro",
85
+ "CountyName",
86
+ ]
87
+
88
+ if "_bdrmcnt_1_" in filename:
89
+ cur_df["Bedroom Count"] = "1-Bedroom"
90
+ elif "_bdrmcnt_2_" in filename:
91
+ cur_df["Bedroom Count"] = "2-Bedrooms"
92
+ elif "_bdrmcnt_3_" in filename:
93
+ cur_df["Bedroom Count"] = "3-Bedrooms"
94
+ elif "_bdrmcnt_4_" in filename:
95
+ cur_df["Bedroom Count"] = "4-Bedrooms"
96
+ elif "_bdrmcnt_5_" in filename:
97
+ cur_df["Bedroom Count"] = "5+-Bedrooms"
98
+ else:
99
+ cur_df["Bedroom Count"] = "All Bedrooms"
100
+
101
+ cur_df = set_home_type(cur_df, filename)
102
+
103
+ cur_df["StateName"] = cur_df["StateName"].astype(str)
104
+ cur_df["RegionName"] = cur_df["RegionName"].astype(str)
105
+
106
+ data_frames = handle_slug_column_mappings(
107
+ data_frames, slug_column_mappings, exclude_columns, filename, cur_df
108
+ )
109
+
110
+
111
+ combined_df = get_combined_df(
112
+ data_frames,
113
+ [
114
+ "RegionID",
115
+ "SizeRank",
116
+ "RegionName",
117
+ "RegionType",
118
+ "StateName",
119
+ "Bedroom Count",
120
+ "Home Type",
121
+ "Date",
122
+ ],
123
+ )
124
+
125
+ combined_df
126
+
127
+
128
+ # In[4]:
129
+
130
+
131
+ final_df = combined_df
132
+
133
+ for index, row in final_df.iterrows():
134
+ if row["RegionType"] == "city":
135
+ final_df.at[index, "City"] = row["RegionName"]
136
+ elif row["RegionType"] == "county":
137
+ final_df.at[index, "County"] = row["RegionName"]
138
+ if row["RegionType"] == "state":
139
+ final_df.at[index, "StateName"] = row["RegionName"]
140
+
141
+ # coalesce State and StateName columns
142
+ # final_df["State"] = final_df["State"].combine_first(final_df["StateName"])
143
+ # final_df["County"] = final_df["County"].combine_first(final_df["CountyName"])
144
+
145
+ # final_df = final_df.drop(
146
+ # columns=[
147
+ # "StateName",
148
+ # # "CountyName"
149
+ # ]
150
+ # )
151
+ final_df
152
+
153
+
154
+ # In[5]:
155
+
156
+
157
+ final_df = final_df.rename(
158
+ columns={
159
+ "RegionID": "Region ID",
160
+ "SizeRank": "Size Rank",
161
+ "RegionName": "Region",
162
+ "RegionType": "Region Type",
163
+ "StateCodeFIPS": "State Code FIPS",
164
+ "StateName": "State",
165
+ "MunicipalCodeFIPS": "Municipal Code FIPS",
166
+ }
167
+ )
168
+
169
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
170
+
171
+ final_df
172
+
173
+
174
+ # In[6]:
175
+
176
+
177
+ save_final_df_as_jsonl(CONFIG_NAME, final_df)
178
+
processors/home_values_forecasts.ipynb ADDED
@@ -0,0 +1,816 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import pandas as pd\n",
10
+ "import os\n",
11
+ "\n",
12
+ "from helpers import get_data_path_for_config, get_combined_df, save_final_df_as_jsonl"
13
+ ]
14
+ },
15
+ {
16
+ "cell_type": "code",
17
+ "execution_count": 2,
18
+ "metadata": {},
19
+ "outputs": [],
20
+ "source": [
21
+ "CONFIG_NAME = \"home_values_forecasts\""
22
+ ]
23
+ },
24
+ {
25
+ "cell_type": "code",
26
+ "execution_count": 3,
27
+ "metadata": {},
28
+ "outputs": [
29
+ {
30
+ "name": "stdout",
31
+ "output_type": "stream",
32
+ "text": [
33
+ "processing Zip_zhvf_growth_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
34
+ "processing Metro_zhvf_growth_uc_sfrcondo_tier_0.33_0.67_sm_sa_month.csv\n",
35
+ "processing Zip_zhvf_growth_uc_sfrcondo_tier_0.33_0.67_month.csv\n",
36
+ "processing Metro_zhvf_growth_uc_sfrcondo_tier_0.33_0.67_month.csv\n"
37
+ ]
38
+ },
39
+ {
40
+ "data": {
41
+ "text/html": [
42
+ "<div>\n",
43
+ "<style scoped>\n",
44
+ " .dataframe tbody tr th:only-of-type {\n",
45
+ " vertical-align: middle;\n",
46
+ " }\n",
47
+ "\n",
48
+ " .dataframe tbody tr th {\n",
49
+ " vertical-align: top;\n",
50
+ " }\n",
51
+ "\n",
52
+ " .dataframe thead th {\n",
53
+ " text-align: right;\n",
54
+ " }\n",
55
+ "</style>\n",
56
+ "<table border=\"1\" class=\"dataframe\">\n",
57
+ " <thead>\n",
58
+ " <tr style=\"text-align: right;\">\n",
59
+ " <th></th>\n",
60
+ " <th>RegionID</th>\n",
61
+ " <th>SizeRank</th>\n",
62
+ " <th>RegionName</th>\n",
63
+ " <th>RegionType</th>\n",
64
+ " <th>StateName</th>\n",
65
+ " <th>State</th>\n",
66
+ " <th>City</th>\n",
67
+ " <th>Metro</th>\n",
68
+ " <th>CountyName</th>\n",
69
+ " <th>BaseDate</th>\n",
70
+ " <th>Month Over Month % (Smoothed) (Seasonally Adjusted)</th>\n",
71
+ " <th>Quarter Over Quarter % (Smoothed) (Seasonally Adjusted)</th>\n",
72
+ " <th>Year Over Year % (Smoothed) (Seasonally Adjusted)</th>\n",
73
+ " <th>Month Over Month %</th>\n",
74
+ " <th>Quarter Over Quarter %</th>\n",
75
+ " <th>Year Over Year %</th>\n",
76
+ " </tr>\n",
77
+ " </thead>\n",
78
+ " <tbody>\n",
79
+ " <tr>\n",
80
+ " <th>0</th>\n",
81
+ " <td>58001</td>\n",
82
+ " <td>30490</td>\n",
83
+ " <td>501</td>\n",
84
+ " <td>zip</td>\n",
85
+ " <td>NY</td>\n",
86
+ " <td>NY</td>\n",
87
+ " <td>Holtsville</td>\n",
88
+ " <td>New York-Newark-Jersey City, NY-NJ-PA</td>\n",
89
+ " <td>Suffolk County</td>\n",
90
+ " <td>2023-12-31</td>\n",
91
+ " <td>NaN</td>\n",
92
+ " <td>NaN</td>\n",
93
+ " <td>NaN</td>\n",
94
+ " <td>-0.7</td>\n",
95
+ " <td>-0.9</td>\n",
96
+ " <td>0.6</td>\n",
97
+ " </tr>\n",
98
+ " <tr>\n",
99
+ " <th>1</th>\n",
100
+ " <td>58002</td>\n",
101
+ " <td>30490</td>\n",
102
+ " <td>544</td>\n",
103
+ " <td>zip</td>\n",
104
+ " <td>NY</td>\n",
105
+ " <td>NY</td>\n",
106
+ " <td>Holtsville</td>\n",
107
+ " <td>New York-Newark-Jersey City, NY-NJ-PA</td>\n",
108
+ " <td>Suffolk County</td>\n",
109
+ " <td>2023-12-31</td>\n",
110
+ " <td>NaN</td>\n",
111
+ " <td>NaN</td>\n",
112
+ " <td>NaN</td>\n",
113
+ " <td>-0.7</td>\n",
114
+ " <td>-0.9</td>\n",
115
+ " <td>0.6</td>\n",
116
+ " </tr>\n",
117
+ " <tr>\n",
118
+ " <th>2</th>\n",
119
+ " <td>58196</td>\n",
120
+ " <td>7440</td>\n",
121
+ " <td>1001</td>\n",
122
+ " <td>zip</td>\n",
123
+ " <td>MA</td>\n",
124
+ " <td>MA</td>\n",
125
+ " <td>Agawam</td>\n",
126
+ " <td>Springfield, MA</td>\n",
127
+ " <td>Hampden County</td>\n",
128
+ " <td>2023-12-31</td>\n",
129
+ " <td>0.4</td>\n",
130
+ " <td>0.9</td>\n",
131
+ " <td>3.2</td>\n",
132
+ " <td>-0.6</td>\n",
133
+ " <td>0.0</td>\n",
134
+ " <td>3.0</td>\n",
135
+ " </tr>\n",
136
+ " <tr>\n",
137
+ " <th>3</th>\n",
138
+ " <td>58197</td>\n",
139
+ " <td>3911</td>\n",
140
+ " <td>1002</td>\n",
141
+ " <td>zip</td>\n",
142
+ " <td>MA</td>\n",
143
+ " <td>MA</td>\n",
144
+ " <td>Amherst</td>\n",
145
+ " <td>Springfield, MA</td>\n",
146
+ " <td>Hampshire County</td>\n",
147
+ " <td>2023-12-31</td>\n",
148
+ " <td>0.2</td>\n",
149
+ " <td>0.7</td>\n",
150
+ " <td>2.7</td>\n",
151
+ " <td>-0.6</td>\n",
152
+ " <td>0.0</td>\n",
153
+ " <td>2.9</td>\n",
154
+ " </tr>\n",
155
+ " <tr>\n",
156
+ " <th>4</th>\n",
157
+ " <td>58198</td>\n",
158
+ " <td>8838</td>\n",
159
+ " <td>1003</td>\n",
160
+ " <td>zip</td>\n",
161
+ " <td>MA</td>\n",
162
+ " <td>MA</td>\n",
163
+ " <td>Amherst</td>\n",
164
+ " <td>Springfield, MA</td>\n",
165
+ " <td>Hampshire County</td>\n",
166
+ " <td>2023-12-31</td>\n",
167
+ " <td>NaN</td>\n",
168
+ " <td>NaN</td>\n",
169
+ " <td>NaN</td>\n",
170
+ " <td>-0.7</td>\n",
171
+ " <td>0.0</td>\n",
172
+ " <td>3.4</td>\n",
173
+ " </tr>\n",
174
+ " <tr>\n",
175
+ " <th>...</th>\n",
176
+ " <td>...</td>\n",
177
+ " <td>...</td>\n",
178
+ " <td>...</td>\n",
179
+ " <td>...</td>\n",
180
+ " <td>...</td>\n",
181
+ " <td>...</td>\n",
182
+ " <td>...</td>\n",
183
+ " <td>...</td>\n",
184
+ " <td>...</td>\n",
185
+ " <td>...</td>\n",
186
+ " <td>...</td>\n",
187
+ " <td>...</td>\n",
188
+ " <td>...</td>\n",
189
+ " <td>...</td>\n",
190
+ " <td>...</td>\n",
191
+ " <td>...</td>\n",
192
+ " </tr>\n",
193
+ " <tr>\n",
194
+ " <th>31849</th>\n",
195
+ " <td>827279</td>\n",
196
+ " <td>7779</td>\n",
197
+ " <td>72405</td>\n",
198
+ " <td>zip</td>\n",
199
+ " <td>AR</td>\n",
200
+ " <td>AR</td>\n",
201
+ " <td>Jonesboro</td>\n",
202
+ " <td>Jonesboro, AR</td>\n",
203
+ " <td>Craighead County</td>\n",
204
+ " <td>2023-12-31</td>\n",
205
+ " <td>NaN</td>\n",
206
+ " <td>NaN</td>\n",
207
+ " <td>NaN</td>\n",
208
+ " <td>-0.7</td>\n",
209
+ " <td>0.0</td>\n",
210
+ " <td>2.5</td>\n",
211
+ " </tr>\n",
212
+ " <tr>\n",
213
+ " <th>31850</th>\n",
214
+ " <td>834213</td>\n",
215
+ " <td>30490</td>\n",
216
+ " <td>11437</td>\n",
217
+ " <td>zip</td>\n",
218
+ " <td>NY</td>\n",
219
+ " <td>NY</td>\n",
220
+ " <td>New York</td>\n",
221
+ " <td>New York-Newark-Jersey City, NY-NJ-PA</td>\n",
222
+ " <td>Queens County</td>\n",
223
+ " <td>2023-12-31</td>\n",
224
+ " <td>NaN</td>\n",
225
+ " <td>NaN</td>\n",
226
+ " <td>NaN</td>\n",
227
+ " <td>-0.7</td>\n",
228
+ " <td>-0.9</td>\n",
229
+ " <td>0.6</td>\n",
230
+ " </tr>\n",
231
+ " <tr>\n",
232
+ " <th>31851</th>\n",
233
+ " <td>845914</td>\n",
234
+ " <td>6361</td>\n",
235
+ " <td>85288</td>\n",
236
+ " <td>zip</td>\n",
237
+ " <td>AZ</td>\n",
238
+ " <td>AZ</td>\n",
239
+ " <td>Tempe</td>\n",
240
+ " <td>Phoenix-Mesa-Chandler, AZ</td>\n",
241
+ " <td>Maricopa County</td>\n",
242
+ " <td>2023-12-31</td>\n",
243
+ " <td>NaN</td>\n",
244
+ " <td>NaN</td>\n",
245
+ " <td>NaN</td>\n",
246
+ " <td>-1.0</td>\n",
247
+ " <td>0.0</td>\n",
248
+ " <td>4.5</td>\n",
249
+ " </tr>\n",
250
+ " <tr>\n",
251
+ " <th>31852</th>\n",
252
+ " <td>847854</td>\n",
253
+ " <td>39992</td>\n",
254
+ " <td>20598</td>\n",
255
+ " <td>zip</td>\n",
256
+ " <td>VA</td>\n",
257
+ " <td>VA</td>\n",
258
+ " <td>Arlington</td>\n",
259
+ " <td>Washington-Arlington-Alexandria, DC-VA-MD-WV</td>\n",
260
+ " <td>Arlington County</td>\n",
261
+ " <td>2023-12-31</td>\n",
262
+ " <td>NaN</td>\n",
263
+ " <td>NaN</td>\n",
264
+ " <td>NaN</td>\n",
265
+ " <td>-0.4</td>\n",
266
+ " <td>0.9</td>\n",
267
+ " <td>1.2</td>\n",
268
+ " </tr>\n",
269
+ " <tr>\n",
270
+ " <th>31853</th>\n",
271
+ " <td>847855</td>\n",
272
+ " <td>30490</td>\n",
273
+ " <td>34249</td>\n",
274
+ " <td>zip</td>\n",
275
+ " <td>FL</td>\n",
276
+ " <td>FL</td>\n",
277
+ " <td>Sarasota</td>\n",
278
+ " <td>North Port-Sarasota-Bradenton, FL</td>\n",
279
+ " <td>Sarasota County</td>\n",
280
+ " <td>2023-12-31</td>\n",
281
+ " <td>NaN</td>\n",
282
+ " <td>NaN</td>\n",
283
+ " <td>NaN</td>\n",
284
+ " <td>-0.9</td>\n",
285
+ " <td>-0.1</td>\n",
286
+ " <td>5.4</td>\n",
287
+ " </tr>\n",
288
+ " </tbody>\n",
289
+ "</table>\n",
290
+ "<p>31854 rows Γ— 16 columns</p>\n",
291
+ "</div>"
292
+ ],
293
+ "text/plain": [
294
+ " RegionID SizeRank RegionName RegionType StateName State City \\\n",
295
+ "0 58001 30490 501 zip NY NY Holtsville \n",
296
+ "1 58002 30490 544 zip NY NY Holtsville \n",
297
+ "2 58196 7440 1001 zip MA MA Agawam \n",
298
+ "3 58197 3911 1002 zip MA MA Amherst \n",
299
+ "4 58198 8838 1003 zip MA MA Amherst \n",
300
+ "... ... ... ... ... ... ... ... \n",
301
+ "31849 827279 7779 72405 zip AR AR Jonesboro \n",
302
+ "31850 834213 30490 11437 zip NY NY New York \n",
303
+ "31851 845914 6361 85288 zip AZ AZ Tempe \n",
304
+ "31852 847854 39992 20598 zip VA VA Arlington \n",
305
+ "31853 847855 30490 34249 zip FL FL Sarasota \n",
306
+ "\n",
307
+ " Metro CountyName \\\n",
308
+ "0 New York-Newark-Jersey City, NY-NJ-PA Suffolk County \n",
309
+ "1 New York-Newark-Jersey City, NY-NJ-PA Suffolk County \n",
310
+ "2 Springfield, MA Hampden County \n",
311
+ "3 Springfield, MA Hampshire County \n",
312
+ "4 Springfield, MA Hampshire County \n",
313
+ "... ... ... \n",
314
+ "31849 Jonesboro, AR Craighead County \n",
315
+ "31850 New York-Newark-Jersey City, NY-NJ-PA Queens County \n",
316
+ "31851 Phoenix-Mesa-Chandler, AZ Maricopa County \n",
317
+ "31852 Washington-Arlington-Alexandria, DC-VA-MD-WV Arlington County \n",
318
+ "31853 North Port-Sarasota-Bradenton, FL Sarasota County \n",
319
+ "\n",
320
+ " BaseDate Month Over Month % (Smoothed) (Seasonally Adjusted) \\\n",
321
+ "0 2023-12-31 NaN \n",
322
+ "1 2023-12-31 NaN \n",
323
+ "2 2023-12-31 0.4 \n",
324
+ "3 2023-12-31 0.2 \n",
325
+ "4 2023-12-31 NaN \n",
326
+ "... ... ... \n",
327
+ "31849 2023-12-31 NaN \n",
328
+ "31850 2023-12-31 NaN \n",
329
+ "31851 2023-12-31 NaN \n",
330
+ "31852 2023-12-31 NaN \n",
331
+ "31853 2023-12-31 NaN \n",
332
+ "\n",
333
+ " Quarter Over Quarter % (Smoothed) (Seasonally Adjusted) \\\n",
334
+ "0 NaN \n",
335
+ "1 NaN \n",
336
+ "2 0.9 \n",
337
+ "3 0.7 \n",
338
+ "4 NaN \n",
339
+ "... ... \n",
340
+ "31849 NaN \n",
341
+ "31850 NaN \n",
342
+ "31851 NaN \n",
343
+ "31852 NaN \n",
344
+ "31853 NaN \n",
345
+ "\n",
346
+ " Year Over Year % (Smoothed) (Seasonally Adjusted) Month Over Month % \\\n",
347
+ "0 NaN -0.7 \n",
348
+ "1 NaN -0.7 \n",
349
+ "2 3.2 -0.6 \n",
350
+ "3 2.7 -0.6 \n",
351
+ "4 NaN -0.7 \n",
352
+ "... ... ... \n",
353
+ "31849 NaN -0.7 \n",
354
+ "31850 NaN -0.7 \n",
355
+ "31851 NaN -1.0 \n",
356
+ "31852 NaN -0.4 \n",
357
+ "31853 NaN -0.9 \n",
358
+ "\n",
359
+ " Quarter Over Quarter % Year Over Year % \n",
360
+ "0 -0.9 0.6 \n",
361
+ "1 -0.9 0.6 \n",
362
+ "2 0.0 3.0 \n",
363
+ "3 0.0 2.9 \n",
364
+ "4 0.0 3.4 \n",
365
+ "... ... ... \n",
366
+ "31849 0.0 2.5 \n",
367
+ "31850 -0.9 0.6 \n",
368
+ "31851 0.0 4.5 \n",
369
+ "31852 0.9 1.2 \n",
370
+ "31853 -0.1 5.4 \n",
371
+ "\n",
372
+ "[31854 rows x 16 columns]"
373
+ ]
374
+ },
375
+ "execution_count": 3,
376
+ "metadata": {},
377
+ "output_type": "execute_result"
378
+ }
379
+ ],
380
+ "source": [
381
+ "data_frames = []\n",
382
+ "\n",
383
+ "data_dir_path = get_data_path_for_config(CONFIG_NAME)\n",
384
+ "\n",
385
+ "for filename in os.listdir(data_dir_path):\n",
386
+ " if filename.endswith(\".csv\"):\n",
387
+ " print(\"processing \" + filename)\n",
388
+ " cur_df = pd.read_csv(os.path.join(data_dir_path, filename))\n",
389
+ "\n",
390
+ " cols = [\"Month Over Month %\", \"Quarter Over Quarter %\", \"Year Over Year %\"]\n",
391
+ " if filename.endswith(\"sm_sa_month.csv\"):\n",
392
+ " # print('Smoothed')\n",
393
+ " cur_df.columns = list(cur_df.columns[:-3]) + [\n",
394
+ " x + \" (Smoothed) (Seasonally Adjusted)\" for x in cols\n",
395
+ " ]\n",
396
+ " else:\n",
397
+ " # print('Raw')\n",
398
+ " cur_df.columns = list(cur_df.columns[:-3]) + cols\n",
399
+ "\n",
400
+ " cur_df[\"RegionName\"] = cur_df[\"RegionName\"].astype(str)\n",
401
+ "\n",
402
+ " data_frames.append(cur_df)\n",
403
+ "\n",
404
+ "\n",
405
+ "combined_df = get_combined_df(\n",
406
+ " data_frames,\n",
407
+ " [\n",
408
+ " \"RegionID\",\n",
409
+ " \"RegionType\",\n",
410
+ " \"SizeRank\",\n",
411
+ " \"StateName\",\n",
412
+ " \"BaseDate\",\n",
413
+ " ],\n",
414
+ ")\n",
415
+ "\n",
416
+ "combined_df"
417
+ ]
418
+ },
419
+ {
420
+ "cell_type": "code",
421
+ "execution_count": 4,
422
+ "metadata": {},
423
+ "outputs": [
424
+ {
425
+ "data": {
426
+ "text/html": [
427
+ "<div>\n",
428
+ "<style scoped>\n",
429
+ " .dataframe tbody tr th:only-of-type {\n",
430
+ " vertical-align: middle;\n",
431
+ " }\n",
432
+ "\n",
433
+ " .dataframe tbody tr th {\n",
434
+ " vertical-align: top;\n",
435
+ " }\n",
436
+ "\n",
437
+ " .dataframe thead th {\n",
438
+ " text-align: right;\n",
439
+ " }\n",
440
+ "</style>\n",
441
+ "<table border=\"1\" class=\"dataframe\">\n",
442
+ " <thead>\n",
443
+ " <tr style=\"text-align: right;\">\n",
444
+ " <th></th>\n",
445
+ " <th>Region ID</th>\n",
446
+ " <th>Size Rank</th>\n",
447
+ " <th>Region</th>\n",
448
+ " <th>Region Type</th>\n",
449
+ " <th>State</th>\n",
450
+ " <th>City</th>\n",
451
+ " <th>Metro</th>\n",
452
+ " <th>County</th>\n",
453
+ " <th>Date</th>\n",
454
+ " <th>Month Over Month % (Smoothed) (Seasonally Adjusted)</th>\n",
455
+ " <th>Quarter Over Quarter % (Smoothed) (Seasonally Adjusted)</th>\n",
456
+ " <th>Year Over Year % (Smoothed) (Seasonally Adjusted)</th>\n",
457
+ " <th>Month Over Month %</th>\n",
458
+ " <th>Quarter Over Quarter %</th>\n",
459
+ " <th>Year Over Year %</th>\n",
460
+ " </tr>\n",
461
+ " </thead>\n",
462
+ " <tbody>\n",
463
+ " <tr>\n",
464
+ " <th>0</th>\n",
465
+ " <td>58001</td>\n",
466
+ " <td>30490</td>\n",
467
+ " <td>501</td>\n",
468
+ " <td>zip</td>\n",
469
+ " <td>NY</td>\n",
470
+ " <td>Holtsville</td>\n",
471
+ " <td>New York-Newark-Jersey City, NY-NJ-PA</td>\n",
472
+ " <td>Suffolk County</td>\n",
473
+ " <td>2023-12-31</td>\n",
474
+ " <td>NaN</td>\n",
475
+ " <td>NaN</td>\n",
476
+ " <td>NaN</td>\n",
477
+ " <td>-0.7</td>\n",
478
+ " <td>-0.9</td>\n",
479
+ " <td>0.6</td>\n",
480
+ " </tr>\n",
481
+ " <tr>\n",
482
+ " <th>1</th>\n",
483
+ " <td>58002</td>\n",
484
+ " <td>30490</td>\n",
485
+ " <td>544</td>\n",
486
+ " <td>zip</td>\n",
487
+ " <td>NY</td>\n",
488
+ " <td>Holtsville</td>\n",
489
+ " <td>New York-Newark-Jersey City, NY-NJ-PA</td>\n",
490
+ " <td>Suffolk County</td>\n",
491
+ " <td>2023-12-31</td>\n",
492
+ " <td>NaN</td>\n",
493
+ " <td>NaN</td>\n",
494
+ " <td>NaN</td>\n",
495
+ " <td>-0.7</td>\n",
496
+ " <td>-0.9</td>\n",
497
+ " <td>0.6</td>\n",
498
+ " </tr>\n",
499
+ " <tr>\n",
500
+ " <th>2</th>\n",
501
+ " <td>58196</td>\n",
502
+ " <td>7440</td>\n",
503
+ " <td>1001</td>\n",
504
+ " <td>zip</td>\n",
505
+ " <td>MA</td>\n",
506
+ " <td>Agawam</td>\n",
507
+ " <td>Springfield, MA</td>\n",
508
+ " <td>Hampden County</td>\n",
509
+ " <td>2023-12-31</td>\n",
510
+ " <td>0.4</td>\n",
511
+ " <td>0.9</td>\n",
512
+ " <td>3.2</td>\n",
513
+ " <td>-0.6</td>\n",
514
+ " <td>0.0</td>\n",
515
+ " <td>3.0</td>\n",
516
+ " </tr>\n",
517
+ " <tr>\n",
518
+ " <th>3</th>\n",
519
+ " <td>58197</td>\n",
520
+ " <td>3911</td>\n",
521
+ " <td>1002</td>\n",
522
+ " <td>zip</td>\n",
523
+ " <td>MA</td>\n",
524
+ " <td>Amherst</td>\n",
525
+ " <td>Springfield, MA</td>\n",
526
+ " <td>Hampshire County</td>\n",
527
+ " <td>2023-12-31</td>\n",
528
+ " <td>0.2</td>\n",
529
+ " <td>0.7</td>\n",
530
+ " <td>2.7</td>\n",
531
+ " <td>-0.6</td>\n",
532
+ " <td>0.0</td>\n",
533
+ " <td>2.9</td>\n",
534
+ " </tr>\n",
535
+ " <tr>\n",
536
+ " <th>4</th>\n",
537
+ " <td>58198</td>\n",
538
+ " <td>8838</td>\n",
539
+ " <td>1003</td>\n",
540
+ " <td>zip</td>\n",
541
+ " <td>MA</td>\n",
542
+ " <td>Amherst</td>\n",
543
+ " <td>Springfield, MA</td>\n",
544
+ " <td>Hampshire County</td>\n",
545
+ " <td>2023-12-31</td>\n",
546
+ " <td>NaN</td>\n",
547
+ " <td>NaN</td>\n",
548
+ " <td>NaN</td>\n",
549
+ " <td>-0.7</td>\n",
550
+ " <td>0.0</td>\n",
551
+ " <td>3.4</td>\n",
552
+ " </tr>\n",
553
+ " <tr>\n",
554
+ " <th>...</th>\n",
555
+ " <td>...</td>\n",
556
+ " <td>...</td>\n",
557
+ " <td>...</td>\n",
558
+ " <td>...</td>\n",
559
+ " <td>...</td>\n",
560
+ " <td>...</td>\n",
561
+ " <td>...</td>\n",
562
+ " <td>...</td>\n",
563
+ " <td>...</td>\n",
564
+ " <td>...</td>\n",
565
+ " <td>...</td>\n",
566
+ " <td>...</td>\n",
567
+ " <td>...</td>\n",
568
+ " <td>...</td>\n",
569
+ " <td>...</td>\n",
570
+ " </tr>\n",
571
+ " <tr>\n",
572
+ " <th>31849</th>\n",
573
+ " <td>827279</td>\n",
574
+ " <td>7779</td>\n",
575
+ " <td>72405</td>\n",
576
+ " <td>zip</td>\n",
577
+ " <td>AR</td>\n",
578
+ " <td>Jonesboro</td>\n",
579
+ " <td>Jonesboro, AR</td>\n",
580
+ " <td>Craighead County</td>\n",
581
+ " <td>2023-12-31</td>\n",
582
+ " <td>NaN</td>\n",
583
+ " <td>NaN</td>\n",
584
+ " <td>NaN</td>\n",
585
+ " <td>-0.7</td>\n",
586
+ " <td>0.0</td>\n",
587
+ " <td>2.5</td>\n",
588
+ " </tr>\n",
589
+ " <tr>\n",
590
+ " <th>31850</th>\n",
591
+ " <td>834213</td>\n",
592
+ " <td>30490</td>\n",
593
+ " <td>11437</td>\n",
594
+ " <td>zip</td>\n",
595
+ " <td>NY</td>\n",
596
+ " <td>New York</td>\n",
597
+ " <td>New York-Newark-Jersey City, NY-NJ-PA</td>\n",
598
+ " <td>Queens County</td>\n",
599
+ " <td>2023-12-31</td>\n",
600
+ " <td>NaN</td>\n",
601
+ " <td>NaN</td>\n",
602
+ " <td>NaN</td>\n",
603
+ " <td>-0.7</td>\n",
604
+ " <td>-0.9</td>\n",
605
+ " <td>0.6</td>\n",
606
+ " </tr>\n",
607
+ " <tr>\n",
608
+ " <th>31851</th>\n",
609
+ " <td>845914</td>\n",
610
+ " <td>6361</td>\n",
611
+ " <td>85288</td>\n",
612
+ " <td>zip</td>\n",
613
+ " <td>AZ</td>\n",
614
+ " <td>Tempe</td>\n",
615
+ " <td>Phoenix-Mesa-Chandler, AZ</td>\n",
616
+ " <td>Maricopa County</td>\n",
617
+ " <td>2023-12-31</td>\n",
618
+ " <td>NaN</td>\n",
619
+ " <td>NaN</td>\n",
620
+ " <td>NaN</td>\n",
621
+ " <td>-1.0</td>\n",
622
+ " <td>0.0</td>\n",
623
+ " <td>4.5</td>\n",
624
+ " </tr>\n",
625
+ " <tr>\n",
626
+ " <th>31852</th>\n",
627
+ " <td>847854</td>\n",
628
+ " <td>39992</td>\n",
629
+ " <td>20598</td>\n",
630
+ " <td>zip</td>\n",
631
+ " <td>VA</td>\n",
632
+ " <td>Arlington</td>\n",
633
+ " <td>Washington-Arlington-Alexandria, DC-VA-MD-WV</td>\n",
634
+ " <td>Arlington County</td>\n",
635
+ " <td>2023-12-31</td>\n",
636
+ " <td>NaN</td>\n",
637
+ " <td>NaN</td>\n",
638
+ " <td>NaN</td>\n",
639
+ " <td>-0.4</td>\n",
640
+ " <td>0.9</td>\n",
641
+ " <td>1.2</td>\n",
642
+ " </tr>\n",
643
+ " <tr>\n",
644
+ " <th>31853</th>\n",
645
+ " <td>847855</td>\n",
646
+ " <td>30490</td>\n",
647
+ " <td>34249</td>\n",
648
+ " <td>zip</td>\n",
649
+ " <td>FL</td>\n",
650
+ " <td>Sarasota</td>\n",
651
+ " <td>North Port-Sarasota-Bradenton, FL</td>\n",
652
+ " <td>Sarasota County</td>\n",
653
+ " <td>2023-12-31</td>\n",
654
+ " <td>NaN</td>\n",
655
+ " <td>NaN</td>\n",
656
+ " <td>NaN</td>\n",
657
+ " <td>-0.9</td>\n",
658
+ " <td>-0.1</td>\n",
659
+ " <td>5.4</td>\n",
660
+ " </tr>\n",
661
+ " </tbody>\n",
662
+ "</table>\n",
663
+ "<p>31854 rows Γ— 15 columns</p>\n",
664
+ "</div>"
665
+ ],
666
+ "text/plain": [
667
+ " Region ID Size Rank Region Region Type State City \\\n",
668
+ "0 58001 30490 501 zip NY Holtsville \n",
669
+ "1 58002 30490 544 zip NY Holtsville \n",
670
+ "2 58196 7440 1001 zip MA Agawam \n",
671
+ "3 58197 3911 1002 zip MA Amherst \n",
672
+ "4 58198 8838 1003 zip MA Amherst \n",
673
+ "... ... ... ... ... ... ... \n",
674
+ "31849 827279 7779 72405 zip AR Jonesboro \n",
675
+ "31850 834213 30490 11437 zip NY New York \n",
676
+ "31851 845914 6361 85288 zip AZ Tempe \n",
677
+ "31852 847854 39992 20598 zip VA Arlington \n",
678
+ "31853 847855 30490 34249 zip FL Sarasota \n",
679
+ "\n",
680
+ " Metro County \\\n",
681
+ "0 New York-Newark-Jersey City, NY-NJ-PA Suffolk County \n",
682
+ "1 New York-Newark-Jersey City, NY-NJ-PA Suffolk County \n",
683
+ "2 Springfield, MA Hampden County \n",
684
+ "3 Springfield, MA Hampshire County \n",
685
+ "4 Springfield, MA Hampshire County \n",
686
+ "... ... ... \n",
687
+ "31849 Jonesboro, AR Craighead County \n",
688
+ "31850 New York-Newark-Jersey City, NY-NJ-PA Queens County \n",
689
+ "31851 Phoenix-Mesa-Chandler, AZ Maricopa County \n",
690
+ "31852 Washington-Arlington-Alexandria, DC-VA-MD-WV Arlington County \n",
691
+ "31853 North Port-Sarasota-Bradenton, FL Sarasota County \n",
692
+ "\n",
693
+ " Date Month Over Month % (Smoothed) (Seasonally Adjusted) \\\n",
694
+ "0 2023-12-31 NaN \n",
695
+ "1 2023-12-31 NaN \n",
696
+ "2 2023-12-31 0.4 \n",
697
+ "3 2023-12-31 0.2 \n",
698
+ "4 2023-12-31 NaN \n",
699
+ "... ... ... \n",
700
+ "31849 2023-12-31 NaN \n",
701
+ "31850 2023-12-31 NaN \n",
702
+ "31851 2023-12-31 NaN \n",
703
+ "31852 2023-12-31 NaN \n",
704
+ "31853 2023-12-31 NaN \n",
705
+ "\n",
706
+ " Quarter Over Quarter % (Smoothed) (Seasonally Adjusted) \\\n",
707
+ "0 NaN \n",
708
+ "1 NaN \n",
709
+ "2 0.9 \n",
710
+ "3 0.7 \n",
711
+ "4 NaN \n",
712
+ "... ... \n",
713
+ "31849 NaN \n",
714
+ "31850 NaN \n",
715
+ "31851 NaN \n",
716
+ "31852 NaN \n",
717
+ "31853 NaN \n",
718
+ "\n",
719
+ " Year Over Year % (Smoothed) (Seasonally Adjusted) Month Over Month % \\\n",
720
+ "0 NaN -0.7 \n",
721
+ "1 NaN -0.7 \n",
722
+ "2 3.2 -0.6 \n",
723
+ "3 2.7 -0.6 \n",
724
+ "4 NaN -0.7 \n",
725
+ "... ... ... \n",
726
+ "31849 NaN -0.7 \n",
727
+ "31850 NaN -0.7 \n",
728
+ "31851 NaN -1.0 \n",
729
+ "31852 NaN -0.4 \n",
730
+ "31853 NaN -0.9 \n",
731
+ "\n",
732
+ " Quarter Over Quarter % Year Over Year % \n",
733
+ "0 -0.9 0.6 \n",
734
+ "1 -0.9 0.6 \n",
735
+ "2 0.0 3.0 \n",
736
+ "3 0.0 2.9 \n",
737
+ "4 0.0 3.4 \n",
738
+ "... ... ... \n",
739
+ "31849 0.0 2.5 \n",
740
+ "31850 -0.9 0.6 \n",
741
+ "31851 0.0 4.5 \n",
742
+ "31852 0.9 1.2 \n",
743
+ "31853 -0.1 5.4 \n",
744
+ "\n",
745
+ "[31854 rows x 15 columns]"
746
+ ]
747
+ },
748
+ "execution_count": 4,
749
+ "metadata": {},
750
+ "output_type": "execute_result"
751
+ }
752
+ ],
753
+ "source": [
754
+ "# Adjust columns\n",
755
+ "final_df = combined_df\n",
756
+ "final_df = combined_df.drop(\"StateName\", axis=1)\n",
757
+ "final_df = final_df.rename(\n",
758
+ " columns={\n",
759
+ " \"CountyName\": \"County\",\n",
760
+ " \"BaseDate\": \"Date\",\n",
761
+ " \"RegionName\": \"Region\",\n",
762
+ " \"RegionType\": \"Region Type\",\n",
763
+ " \"RegionID\": \"Region ID\",\n",
764
+ " \"SizeRank\": \"Size Rank\",\n",
765
+ " }\n",
766
+ ")\n",
767
+ "\n",
768
+ "# iterate over rows of final_df and populate State and City columns if the regionType is msa\n",
769
+ "for index, row in final_df.iterrows():\n",
770
+ " if row[\"Region Type\"] == \"msa\":\n",
771
+ " regionName = row[\"Region\"]\n",
772
+ " # final_df.at[index, 'Metro'] = regionName\n",
773
+ "\n",
774
+ " city = regionName.split(\", \")[0]\n",
775
+ " final_df.at[index, \"City\"] = city\n",
776
+ "\n",
777
+ " state = regionName.split(\", \")[1]\n",
778
+ " final_df.at[index, \"State\"] = state\n",
779
+ "\n",
780
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
781
+ "\n",
782
+ "final_df"
783
+ ]
784
+ },
785
+ {
786
+ "cell_type": "code",
787
+ "execution_count": 5,
788
+ "metadata": {},
789
+ "outputs": [],
790
+ "source": [
791
+ "save_final_df_as_jsonl(CONFIG_NAME, final_df)"
792
+ ]
793
+ }
794
+ ],
795
+ "metadata": {
796
+ "kernelspec": {
797
+ "display_name": "Python 3",
798
+ "language": "python",
799
+ "name": "python3"
800
+ },
801
+ "language_info": {
802
+ "codemirror_mode": {
803
+ "name": "ipython",
804
+ "version": 3
805
+ },
806
+ "file_extension": ".py",
807
+ "mimetype": "text/x-python",
808
+ "name": "python",
809
+ "nbconvert_exporter": "python",
810
+ "pygments_lexer": "ipython3",
811
+ "version": "3.12.2"
812
+ }
813
+ },
814
+ "nbformat": 4,
815
+ "nbformat_minor": 2
816
+ }
processors/home_values_forecasts.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[1]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import get_data_path_for_config, get_combined_df, save_final_df_as_jsonl
11
+
12
+
13
+ # In[2]:
14
+
15
+
16
+ CONFIG_NAME = "home_values_forecasts"
17
+
18
+
19
+ # In[3]:
20
+
21
+
22
+ data_frames = []
23
+
24
+ data_dir_path = get_data_path_for_config(CONFIG_NAME)
25
+
26
+ for filename in os.listdir(data_dir_path):
27
+ if filename.endswith(".csv"):
28
+ print("processing " + filename)
29
+ cur_df = pd.read_csv(os.path.join(data_dir_path, filename))
30
+
31
+ cols = ["Month Over Month %", "Quarter Over Quarter %", "Year Over Year %"]
32
+ if filename.endswith("sm_sa_month.csv"):
33
+ # print('Smoothed')
34
+ cur_df.columns = list(cur_df.columns[:-3]) + [
35
+ x + " (Smoothed) (Seasonally Adjusted)" for x in cols
36
+ ]
37
+ else:
38
+ # print('Raw')
39
+ cur_df.columns = list(cur_df.columns[:-3]) + cols
40
+
41
+ cur_df["RegionName"] = cur_df["RegionName"].astype(str)
42
+
43
+ data_frames.append(cur_df)
44
+
45
+
46
+ combined_df = get_combined_df(
47
+ data_frames,
48
+ [
49
+ "RegionID",
50
+ "RegionType",
51
+ "SizeRank",
52
+ "StateName",
53
+ "BaseDate",
54
+ ],
55
+ )
56
+
57
+ combined_df
58
+
59
+
60
+ # In[4]:
61
+
62
+
63
+ # Adjust columns
64
+ final_df = combined_df
65
+ final_df = combined_df.drop("StateName", axis=1)
66
+ final_df = final_df.rename(
67
+ columns={
68
+ "CountyName": "County",
69
+ "BaseDate": "Date",
70
+ "RegionName": "Region",
71
+ "RegionType": "Region Type",
72
+ "RegionID": "Region ID",
73
+ "SizeRank": "Size Rank",
74
+ }
75
+ )
76
+
77
+ # iterate over rows of final_df and populate State and City columns if the regionType is msa
78
+ for index, row in final_df.iterrows():
79
+ if row["Region Type"] == "msa":
80
+ regionName = row["Region"]
81
+ # final_df.at[index, 'Metro'] = regionName
82
+
83
+ city = regionName.split(", ")[0]
84
+ final_df.at[index, "City"] = city
85
+
86
+ state = regionName.split(", ")[1]
87
+ final_df.at[index, "State"] = state
88
+
89
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
90
+
91
+ final_df
92
+
93
+
94
+ # In[5]:
95
+
96
+
97
+ save_final_df_as_jsonl(CONFIG_NAME, final_df)
98
+
processors/new_construction.ipynb ADDED
@@ -0,0 +1,585 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import pandas as pd\n",
10
+ "import os\n",
11
+ "\n",
12
+ "from helpers import (\n",
13
+ " get_data_path_for_config,\n",
14
+ " get_combined_df,\n",
15
+ " save_final_df_as_jsonl,\n",
16
+ " handle_slug_column_mappings,\n",
17
+ " set_home_type,\n",
18
+ ")"
19
+ ]
20
+ },
21
+ {
22
+ "cell_type": "code",
23
+ "execution_count": 2,
24
+ "metadata": {},
25
+ "outputs": [],
26
+ "source": [
27
+ "CONFIG_NAME = \"new_construction\""
28
+ ]
29
+ },
30
+ {
31
+ "cell_type": "code",
32
+ "execution_count": 3,
33
+ "metadata": {},
34
+ "outputs": [
35
+ {
36
+ "name": "stdout",
37
+ "output_type": "stream",
38
+ "text": [
39
+ "processing Metro_new_con_sales_count_raw_uc_condo_month.csv\n",
40
+ "processing Metro_new_con_median_sale_price_per_sqft_uc_sfr_month.csv\n",
41
+ "processing Metro_new_con_sales_count_raw_uc_sfr_month.csv\n",
42
+ "processing Metro_new_con_median_sale_price_uc_sfrcondo_month.csv\n",
43
+ "processing Metro_new_con_median_sale_price_per_sqft_uc_condo_month.csv\n",
44
+ "processing Metro_new_con_sales_count_raw_uc_sfrcondo_month.csv\n",
45
+ "processing Metro_new_con_median_sale_price_uc_condo_month.csv\n",
46
+ "processing Metro_new_con_median_sale_price_uc_sfr_month.csv\n",
47
+ "processing Metro_new_con_median_sale_price_per_sqft_uc_sfrcondo_month.csv\n"
48
+ ]
49
+ },
50
+ {
51
+ "data": {
52
+ "text/html": [
53
+ "<div>\n",
54
+ "<style scoped>\n",
55
+ " .dataframe tbody tr th:only-of-type {\n",
56
+ " vertical-align: middle;\n",
57
+ " }\n",
58
+ "\n",
59
+ " .dataframe tbody tr th {\n",
60
+ " vertical-align: top;\n",
61
+ " }\n",
62
+ "\n",
63
+ " .dataframe thead th {\n",
64
+ " text-align: right;\n",
65
+ " }\n",
66
+ "</style>\n",
67
+ "<table border=\"1\" class=\"dataframe\">\n",
68
+ " <thead>\n",
69
+ " <tr style=\"text-align: right;\">\n",
70
+ " <th></th>\n",
71
+ " <th>RegionID</th>\n",
72
+ " <th>SizeRank</th>\n",
73
+ " <th>RegionName</th>\n",
74
+ " <th>RegionType</th>\n",
75
+ " <th>StateName</th>\n",
76
+ " <th>Home Type</th>\n",
77
+ " <th>Date</th>\n",
78
+ " <th>Sales Count</th>\n",
79
+ " <th>Median Sale Price per Sqft</th>\n",
80
+ " <th>Median Sale Price</th>\n",
81
+ " </tr>\n",
82
+ " </thead>\n",
83
+ " <tbody>\n",
84
+ " <tr>\n",
85
+ " <th>0</th>\n",
86
+ " <td>102001</td>\n",
87
+ " <td>0</td>\n",
88
+ " <td>United States</td>\n",
89
+ " <td>country</td>\n",
90
+ " <td>NaN</td>\n",
91
+ " <td>SFR</td>\n",
92
+ " <td>2018-01-31</td>\n",
93
+ " <td>33940.0</td>\n",
94
+ " <td>137.412316</td>\n",
95
+ " <td>309000.0</td>\n",
96
+ " </tr>\n",
97
+ " <tr>\n",
98
+ " <th>1</th>\n",
99
+ " <td>102001</td>\n",
100
+ " <td>0</td>\n",
101
+ " <td>United States</td>\n",
102
+ " <td>country</td>\n",
103
+ " <td>NaN</td>\n",
104
+ " <td>SFR</td>\n",
105
+ " <td>2018-02-28</td>\n",
106
+ " <td>33304.0</td>\n",
107
+ " <td>137.199170</td>\n",
108
+ " <td>309072.5</td>\n",
109
+ " </tr>\n",
110
+ " <tr>\n",
111
+ " <th>2</th>\n",
112
+ " <td>102001</td>\n",
113
+ " <td>0</td>\n",
114
+ " <td>United States</td>\n",
115
+ " <td>country</td>\n",
116
+ " <td>NaN</td>\n",
117
+ " <td>SFR</td>\n",
118
+ " <td>2018-03-31</td>\n",
119
+ " <td>42641.0</td>\n",
120
+ " <td>139.520863</td>\n",
121
+ " <td>315488.0</td>\n",
122
+ " </tr>\n",
123
+ " <tr>\n",
124
+ " <th>3</th>\n",
125
+ " <td>102001</td>\n",
126
+ " <td>0</td>\n",
127
+ " <td>United States</td>\n",
128
+ " <td>country</td>\n",
129
+ " <td>NaN</td>\n",
130
+ " <td>SFR</td>\n",
131
+ " <td>2018-04-30</td>\n",
132
+ " <td>37588.0</td>\n",
133
+ " <td>139.778110</td>\n",
134
+ " <td>314990.0</td>\n",
135
+ " </tr>\n",
136
+ " <tr>\n",
137
+ " <th>4</th>\n",
138
+ " <td>102001</td>\n",
139
+ " <td>0</td>\n",
140
+ " <td>United States</td>\n",
141
+ " <td>country</td>\n",
142
+ " <td>NaN</td>\n",
143
+ " <td>SFR</td>\n",
144
+ " <td>2018-05-31</td>\n",
145
+ " <td>39933.0</td>\n",
146
+ " <td>143.317968</td>\n",
147
+ " <td>324500.0</td>\n",
148
+ " </tr>\n",
149
+ " <tr>\n",
150
+ " <th>...</th>\n",
151
+ " <td>...</td>\n",
152
+ " <td>...</td>\n",
153
+ " <td>...</td>\n",
154
+ " <td>...</td>\n",
155
+ " <td>...</td>\n",
156
+ " <td>...</td>\n",
157
+ " <td>...</td>\n",
158
+ " <td>...</td>\n",
159
+ " <td>...</td>\n",
160
+ " <td>...</td>\n",
161
+ " </tr>\n",
162
+ " <tr>\n",
163
+ " <th>49482</th>\n",
164
+ " <td>845162</td>\n",
165
+ " <td>535</td>\n",
166
+ " <td>Granbury, TX</td>\n",
167
+ " <td>msa</td>\n",
168
+ " <td>TX</td>\n",
169
+ " <td>all homes</td>\n",
170
+ " <td>2023-07-31</td>\n",
171
+ " <td>31.0</td>\n",
172
+ " <td>NaN</td>\n",
173
+ " <td>NaN</td>\n",
174
+ " </tr>\n",
175
+ " <tr>\n",
176
+ " <th>49483</th>\n",
177
+ " <td>845162</td>\n",
178
+ " <td>535</td>\n",
179
+ " <td>Granbury, TX</td>\n",
180
+ " <td>msa</td>\n",
181
+ " <td>TX</td>\n",
182
+ " <td>all homes</td>\n",
183
+ " <td>2023-08-31</td>\n",
184
+ " <td>33.0</td>\n",
185
+ " <td>NaN</td>\n",
186
+ " <td>NaN</td>\n",
187
+ " </tr>\n",
188
+ " <tr>\n",
189
+ " <th>49484</th>\n",
190
+ " <td>845162</td>\n",
191
+ " <td>535</td>\n",
192
+ " <td>Granbury, TX</td>\n",
193
+ " <td>msa</td>\n",
194
+ " <td>TX</td>\n",
195
+ " <td>all homes</td>\n",
196
+ " <td>2023-09-30</td>\n",
197
+ " <td>26.0</td>\n",
198
+ " <td>NaN</td>\n",
199
+ " <td>NaN</td>\n",
200
+ " </tr>\n",
201
+ " <tr>\n",
202
+ " <th>49485</th>\n",
203
+ " <td>845162</td>\n",
204
+ " <td>535</td>\n",
205
+ " <td>Granbury, TX</td>\n",
206
+ " <td>msa</td>\n",
207
+ " <td>TX</td>\n",
208
+ " <td>all homes</td>\n",
209
+ " <td>2023-10-31</td>\n",
210
+ " <td>24.0</td>\n",
211
+ " <td>NaN</td>\n",
212
+ " <td>NaN</td>\n",
213
+ " </tr>\n",
214
+ " <tr>\n",
215
+ " <th>49486</th>\n",
216
+ " <td>845162</td>\n",
217
+ " <td>535</td>\n",
218
+ " <td>Granbury, TX</td>\n",
219
+ " <td>msa</td>\n",
220
+ " <td>TX</td>\n",
221
+ " <td>all homes</td>\n",
222
+ " <td>2023-11-30</td>\n",
223
+ " <td>16.0</td>\n",
224
+ " <td>NaN</td>\n",
225
+ " <td>NaN</td>\n",
226
+ " </tr>\n",
227
+ " </tbody>\n",
228
+ "</table>\n",
229
+ "<p>49487 rows Γ— 10 columns</p>\n",
230
+ "</div>"
231
+ ],
232
+ "text/plain": [
233
+ " RegionID SizeRank RegionName RegionType StateName Home Type \\\n",
234
+ "0 102001 0 United States country NaN SFR \n",
235
+ "1 102001 0 United States country NaN SFR \n",
236
+ "2 102001 0 United States country NaN SFR \n",
237
+ "3 102001 0 United States country NaN SFR \n",
238
+ "4 102001 0 United States country NaN SFR \n",
239
+ "... ... ... ... ... ... ... \n",
240
+ "49482 845162 535 Granbury, TX msa TX all homes \n",
241
+ "49483 845162 535 Granbury, TX msa TX all homes \n",
242
+ "49484 845162 535 Granbury, TX msa TX all homes \n",
243
+ "49485 845162 535 Granbury, TX msa TX all homes \n",
244
+ "49486 845162 535 Granbury, TX msa TX all homes \n",
245
+ "\n",
246
+ " Date Sales Count Median Sale Price per Sqft Median Sale Price \n",
247
+ "0 2018-01-31 33940.0 137.412316 309000.0 \n",
248
+ "1 2018-02-28 33304.0 137.199170 309072.5 \n",
249
+ "2 2018-03-31 42641.0 139.520863 315488.0 \n",
250
+ "3 2018-04-30 37588.0 139.778110 314990.0 \n",
251
+ "4 2018-05-31 39933.0 143.317968 324500.0 \n",
252
+ "... ... ... ... ... \n",
253
+ "49482 2023-07-31 31.0 NaN NaN \n",
254
+ "49483 2023-08-31 33.0 NaN NaN \n",
255
+ "49484 2023-09-30 26.0 NaN NaN \n",
256
+ "49485 2023-10-31 24.0 NaN NaN \n",
257
+ "49486 2023-11-30 16.0 NaN NaN \n",
258
+ "\n",
259
+ "[49487 rows x 10 columns]"
260
+ ]
261
+ },
262
+ "execution_count": 3,
263
+ "metadata": {},
264
+ "output_type": "execute_result"
265
+ }
266
+ ],
267
+ "source": [
268
+ "data_frames = []\n",
269
+ "\n",
270
+ "exclude_columns = [\n",
271
+ " \"RegionID\",\n",
272
+ " \"SizeRank\",\n",
273
+ " \"RegionName\",\n",
274
+ " \"RegionType\",\n",
275
+ " \"StateName\",\n",
276
+ " \"Home Type\",\n",
277
+ "]\n",
278
+ "\n",
279
+ "slug_column_mappings = {\n",
280
+ " \"_median_sale_price_per_sqft\": \"Median Sale Price per Sqft\",\n",
281
+ " \"_median_sale_price\": \"Median Sale Price\",\n",
282
+ " \"sales_count\": \"Sales Count\",\n",
283
+ "}\n",
284
+ "\n",
285
+ "data_dir_path = get_data_path_for_config(CONFIG_NAME)\n",
286
+ "\n",
287
+ "for filename in os.listdir(data_dir_path):\n",
288
+ " if filename.endswith(\".csv\"):\n",
289
+ " print(\"processing \" + filename)\n",
290
+ " cur_df = pd.read_csv(os.path.join(data_dir_path, filename))\n",
291
+ "\n",
292
+ " cur_df = set_home_type(cur_df, filename)\n",
293
+ "\n",
294
+ " data_frames = handle_slug_column_mappings(\n",
295
+ " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
296
+ " )\n",
297
+ "\n",
298
+ "\n",
299
+ "combined_df = get_combined_df(\n",
300
+ " data_frames,\n",
301
+ " [\n",
302
+ " \"RegionID\",\n",
303
+ " \"SizeRank\",\n",
304
+ " \"RegionName\",\n",
305
+ " \"RegionType\",\n",
306
+ " \"StateName\",\n",
307
+ " \"Home Type\",\n",
308
+ " \"Date\",\n",
309
+ " ],\n",
310
+ ")\n",
311
+ "\n",
312
+ "combined_df"
313
+ ]
314
+ },
315
+ {
316
+ "cell_type": "code",
317
+ "execution_count": 4,
318
+ "metadata": {},
319
+ "outputs": [
320
+ {
321
+ "data": {
322
+ "text/html": [
323
+ "<div>\n",
324
+ "<style scoped>\n",
325
+ " .dataframe tbody tr th:only-of-type {\n",
326
+ " vertical-align: middle;\n",
327
+ " }\n",
328
+ "\n",
329
+ " .dataframe tbody tr th {\n",
330
+ " vertical-align: top;\n",
331
+ " }\n",
332
+ "\n",
333
+ " .dataframe thead th {\n",
334
+ " text-align: right;\n",
335
+ " }\n",
336
+ "</style>\n",
337
+ "<table border=\"1\" class=\"dataframe\">\n",
338
+ " <thead>\n",
339
+ " <tr style=\"text-align: right;\">\n",
340
+ " <th></th>\n",
341
+ " <th>Region ID</th>\n",
342
+ " <th>Size Rank</th>\n",
343
+ " <th>Region</th>\n",
344
+ " <th>Region Type</th>\n",
345
+ " <th>State</th>\n",
346
+ " <th>Home Type</th>\n",
347
+ " <th>Date</th>\n",
348
+ " <th>Sales Count</th>\n",
349
+ " <th>Median Sale Price per Sqft</th>\n",
350
+ " <th>Median Sale Price</th>\n",
351
+ " </tr>\n",
352
+ " </thead>\n",
353
+ " <tbody>\n",
354
+ " <tr>\n",
355
+ " <th>0</th>\n",
356
+ " <td>102001</td>\n",
357
+ " <td>0</td>\n",
358
+ " <td>United States</td>\n",
359
+ " <td>country</td>\n",
360
+ " <td>NaN</td>\n",
361
+ " <td>SFR</td>\n",
362
+ " <td>2018-01-31</td>\n",
363
+ " <td>33940.0</td>\n",
364
+ " <td>137.412316</td>\n",
365
+ " <td>309000.0</td>\n",
366
+ " </tr>\n",
367
+ " <tr>\n",
368
+ " <th>1</th>\n",
369
+ " <td>102001</td>\n",
370
+ " <td>0</td>\n",
371
+ " <td>United States</td>\n",
372
+ " <td>country</td>\n",
373
+ " <td>NaN</td>\n",
374
+ " <td>SFR</td>\n",
375
+ " <td>2018-02-28</td>\n",
376
+ " <td>33304.0</td>\n",
377
+ " <td>137.199170</td>\n",
378
+ " <td>309072.5</td>\n",
379
+ " </tr>\n",
380
+ " <tr>\n",
381
+ " <th>2</th>\n",
382
+ " <td>102001</td>\n",
383
+ " <td>0</td>\n",
384
+ " <td>United States</td>\n",
385
+ " <td>country</td>\n",
386
+ " <td>NaN</td>\n",
387
+ " <td>SFR</td>\n",
388
+ " <td>2018-03-31</td>\n",
389
+ " <td>42641.0</td>\n",
390
+ " <td>139.520863</td>\n",
391
+ " <td>315488.0</td>\n",
392
+ " </tr>\n",
393
+ " <tr>\n",
394
+ " <th>3</th>\n",
395
+ " <td>102001</td>\n",
396
+ " <td>0</td>\n",
397
+ " <td>United States</td>\n",
398
+ " <td>country</td>\n",
399
+ " <td>NaN</td>\n",
400
+ " <td>SFR</td>\n",
401
+ " <td>2018-04-30</td>\n",
402
+ " <td>37588.0</td>\n",
403
+ " <td>139.778110</td>\n",
404
+ " <td>314990.0</td>\n",
405
+ " </tr>\n",
406
+ " <tr>\n",
407
+ " <th>4</th>\n",
408
+ " <td>102001</td>\n",
409
+ " <td>0</td>\n",
410
+ " <td>United States</td>\n",
411
+ " <td>country</td>\n",
412
+ " <td>NaN</td>\n",
413
+ " <td>SFR</td>\n",
414
+ " <td>2018-05-31</td>\n",
415
+ " <td>39933.0</td>\n",
416
+ " <td>143.317968</td>\n",
417
+ " <td>324500.0</td>\n",
418
+ " </tr>\n",
419
+ " <tr>\n",
420
+ " <th>...</th>\n",
421
+ " <td>...</td>\n",
422
+ " <td>...</td>\n",
423
+ " <td>...</td>\n",
424
+ " <td>...</td>\n",
425
+ " <td>...</td>\n",
426
+ " <td>...</td>\n",
427
+ " <td>...</td>\n",
428
+ " <td>...</td>\n",
429
+ " <td>...</td>\n",
430
+ " <td>...</td>\n",
431
+ " </tr>\n",
432
+ " <tr>\n",
433
+ " <th>49482</th>\n",
434
+ " <td>845162</td>\n",
435
+ " <td>535</td>\n",
436
+ " <td>Granbury, TX</td>\n",
437
+ " <td>msa</td>\n",
438
+ " <td>TX</td>\n",
439
+ " <td>all homes</td>\n",
440
+ " <td>2023-07-31</td>\n",
441
+ " <td>31.0</td>\n",
442
+ " <td>NaN</td>\n",
443
+ " <td>NaN</td>\n",
444
+ " </tr>\n",
445
+ " <tr>\n",
446
+ " <th>49483</th>\n",
447
+ " <td>845162</td>\n",
448
+ " <td>535</td>\n",
449
+ " <td>Granbury, TX</td>\n",
450
+ " <td>msa</td>\n",
451
+ " <td>TX</td>\n",
452
+ " <td>all homes</td>\n",
453
+ " <td>2023-08-31</td>\n",
454
+ " <td>33.0</td>\n",
455
+ " <td>NaN</td>\n",
456
+ " <td>NaN</td>\n",
457
+ " </tr>\n",
458
+ " <tr>\n",
459
+ " <th>49484</th>\n",
460
+ " <td>845162</td>\n",
461
+ " <td>535</td>\n",
462
+ " <td>Granbury, TX</td>\n",
463
+ " <td>msa</td>\n",
464
+ " <td>TX</td>\n",
465
+ " <td>all homes</td>\n",
466
+ " <td>2023-09-30</td>\n",
467
+ " <td>26.0</td>\n",
468
+ " <td>NaN</td>\n",
469
+ " <td>NaN</td>\n",
470
+ " </tr>\n",
471
+ " <tr>\n",
472
+ " <th>49485</th>\n",
473
+ " <td>845162</td>\n",
474
+ " <td>535</td>\n",
475
+ " <td>Granbury, TX</td>\n",
476
+ " <td>msa</td>\n",
477
+ " <td>TX</td>\n",
478
+ " <td>all homes</td>\n",
479
+ " <td>2023-10-31</td>\n",
480
+ " <td>24.0</td>\n",
481
+ " <td>NaN</td>\n",
482
+ " <td>NaN</td>\n",
483
+ " </tr>\n",
484
+ " <tr>\n",
485
+ " <th>49486</th>\n",
486
+ " <td>845162</td>\n",
487
+ " <td>535</td>\n",
488
+ " <td>Granbury, TX</td>\n",
489
+ " <td>msa</td>\n",
490
+ " <td>TX</td>\n",
491
+ " <td>all homes</td>\n",
492
+ " <td>2023-11-30</td>\n",
493
+ " <td>16.0</td>\n",
494
+ " <td>NaN</td>\n",
495
+ " <td>NaN</td>\n",
496
+ " </tr>\n",
497
+ " </tbody>\n",
498
+ "</table>\n",
499
+ "<p>49487 rows Γ— 10 columns</p>\n",
500
+ "</div>"
501
+ ],
502
+ "text/plain": [
503
+ " Region ID Size Rank Region Region Type State Home Type \\\n",
504
+ "0 102001 0 United States country NaN SFR \n",
505
+ "1 102001 0 United States country NaN SFR \n",
506
+ "2 102001 0 United States country NaN SFR \n",
507
+ "3 102001 0 United States country NaN SFR \n",
508
+ "4 102001 0 United States country NaN SFR \n",
509
+ "... ... ... ... ... ... ... \n",
510
+ "49482 845162 535 Granbury, TX msa TX all homes \n",
511
+ "49483 845162 535 Granbury, TX msa TX all homes \n",
512
+ "49484 845162 535 Granbury, TX msa TX all homes \n",
513
+ "49485 845162 535 Granbury, TX msa TX all homes \n",
514
+ "49486 845162 535 Granbury, TX msa TX all homes \n",
515
+ "\n",
516
+ " Date Sales Count Median Sale Price per Sqft Median Sale Price \n",
517
+ "0 2018-01-31 33940.0 137.412316 309000.0 \n",
518
+ "1 2018-02-28 33304.0 137.199170 309072.5 \n",
519
+ "2 2018-03-31 42641.0 139.520863 315488.0 \n",
520
+ "3 2018-04-30 37588.0 139.778110 314990.0 \n",
521
+ "4 2018-05-31 39933.0 143.317968 324500.0 \n",
522
+ "... ... ... ... ... \n",
523
+ "49482 2023-07-31 31.0 NaN NaN \n",
524
+ "49483 2023-08-31 33.0 NaN NaN \n",
525
+ "49484 2023-09-30 26.0 NaN NaN \n",
526
+ "49485 2023-10-31 24.0 NaN NaN \n",
527
+ "49486 2023-11-30 16.0 NaN NaN \n",
528
+ "\n",
529
+ "[49487 rows x 10 columns]"
530
+ ]
531
+ },
532
+ "execution_count": 4,
533
+ "metadata": {},
534
+ "output_type": "execute_result"
535
+ }
536
+ ],
537
+ "source": [
538
+ "final_df = combined_df\n",
539
+ "final_df = final_df.rename(\n",
540
+ " columns={\n",
541
+ " \"RegionID\": \"Region ID\",\n",
542
+ " \"SizeRank\": \"Size Rank\",\n",
543
+ " \"RegionName\": \"Region\",\n",
544
+ " \"RegionType\": \"Region Type\",\n",
545
+ " \"StateName\": \"State\",\n",
546
+ " }\n",
547
+ ")\n",
548
+ "\n",
549
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
550
+ "\n",
551
+ "final_df.sort_values(by=[\"Region ID\", \"Home Type\", \"Date\"])"
552
+ ]
553
+ },
554
+ {
555
+ "cell_type": "code",
556
+ "execution_count": 5,
557
+ "metadata": {},
558
+ "outputs": [],
559
+ "source": [
560
+ "save_final_df_as_jsonl(CONFIG_NAME, final_df)"
561
+ ]
562
+ }
563
+ ],
564
+ "metadata": {
565
+ "kernelspec": {
566
+ "display_name": "Python 3",
567
+ "language": "python",
568
+ "name": "python3"
569
+ },
570
+ "language_info": {
571
+ "codemirror_mode": {
572
+ "name": "ipython",
573
+ "version": 3
574
+ },
575
+ "file_extension": ".py",
576
+ "mimetype": "text/x-python",
577
+ "name": "python",
578
+ "nbconvert_exporter": "python",
579
+ "pygments_lexer": "ipython3",
580
+ "version": "3.12.2"
581
+ }
582
+ },
583
+ "nbformat": 4,
584
+ "nbformat_minor": 2
585
+ }
processors/new_construction.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[1]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import (
11
+ get_data_path_for_config,
12
+ get_combined_df,
13
+ save_final_df_as_jsonl,
14
+ handle_slug_column_mappings,
15
+ set_home_type,
16
+ )
17
+
18
+
19
+ # In[2]:
20
+
21
+
22
+ CONFIG_NAME = "new_construction"
23
+
24
+
25
+ # In[3]:
26
+
27
+
28
+ data_frames = []
29
+
30
+ exclude_columns = [
31
+ "RegionID",
32
+ "SizeRank",
33
+ "RegionName",
34
+ "RegionType",
35
+ "StateName",
36
+ "Home Type",
37
+ ]
38
+
39
+ slug_column_mappings = {
40
+ "_median_sale_price_per_sqft": "Median Sale Price per Sqft",
41
+ "_median_sale_price": "Median Sale Price",
42
+ "sales_count": "Sales Count",
43
+ }
44
+
45
+ data_dir_path = get_data_path_for_config(CONFIG_NAME)
46
+
47
+ for filename in os.listdir(data_dir_path):
48
+ if filename.endswith(".csv"):
49
+ print("processing " + filename)
50
+ cur_df = pd.read_csv(os.path.join(data_dir_path, filename))
51
+
52
+ cur_df = set_home_type(cur_df, filename)
53
+
54
+ data_frames = handle_slug_column_mappings(
55
+ data_frames, slug_column_mappings, exclude_columns, filename, cur_df
56
+ )
57
+
58
+
59
+ combined_df = get_combined_df(
60
+ data_frames,
61
+ [
62
+ "RegionID",
63
+ "SizeRank",
64
+ "RegionName",
65
+ "RegionType",
66
+ "StateName",
67
+ "Home Type",
68
+ "Date",
69
+ ],
70
+ )
71
+
72
+ combined_df
73
+
74
+
75
+ # In[4]:
76
+
77
+
78
+ final_df = combined_df
79
+ final_df = final_df.rename(
80
+ columns={
81
+ "RegionID": "Region ID",
82
+ "SizeRank": "Size Rank",
83
+ "RegionName": "Region",
84
+ "RegionType": "Region Type",
85
+ "StateName": "State",
86
+ }
87
+ )
88
+
89
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
90
+
91
+ final_df.sort_values(by=["Region ID", "Home Type", "Date"])
92
+
93
+
94
+ # In[5]:
95
+
96
+
97
+ save_final_df_as_jsonl(CONFIG_NAME, final_df)
98
+
processors/rentals.ipynb ADDED
@@ -0,0 +1,1122 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 2,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import pandas as pd\n",
10
+ "import os\n",
11
+ "\n",
12
+ "from helpers import (\n",
13
+ " get_data_path_for_config,\n",
14
+ " get_combined_df,\n",
15
+ " save_final_df_as_jsonl,\n",
16
+ " handle_slug_column_mappings,\n",
17
+ " set_home_type,\n",
18
+ ")"
19
+ ]
20
+ },
21
+ {
22
+ "cell_type": "code",
23
+ "execution_count": 1,
24
+ "metadata": {},
25
+ "outputs": [],
26
+ "source": [
27
+ "CONFIG_NAME = \"rentals\""
28
+ ]
29
+ },
30
+ {
31
+ "cell_type": "code",
32
+ "execution_count": 3,
33
+ "metadata": {},
34
+ "outputs": [
35
+ {
36
+ "data": {
37
+ "text/html": [
38
+ "<div>\n",
39
+ "<style scoped>\n",
40
+ " .dataframe tbody tr th:only-of-type {\n",
41
+ " vertical-align: middle;\n",
42
+ " }\n",
43
+ "\n",
44
+ " .dataframe tbody tr th {\n",
45
+ " vertical-align: top;\n",
46
+ " }\n",
47
+ "\n",
48
+ " .dataframe thead th {\n",
49
+ " text-align: right;\n",
50
+ " }\n",
51
+ "</style>\n",
52
+ "<table border=\"1\" class=\"dataframe\">\n",
53
+ " <thead>\n",
54
+ " <tr style=\"text-align: right;\">\n",
55
+ " <th></th>\n",
56
+ " <th>RegionID</th>\n",
57
+ " <th>SizeRank</th>\n",
58
+ " <th>RegionName</th>\n",
59
+ " <th>RegionType</th>\n",
60
+ " <th>StateName</th>\n",
61
+ " <th>Home Type</th>\n",
62
+ " <th>State</th>\n",
63
+ " <th>Metro</th>\n",
64
+ " <th>StateCodeFIPS</th>\n",
65
+ " <th>MunicipalCodeFIPS</th>\n",
66
+ " <th>Date</th>\n",
67
+ " <th>Rent (Smoothed)</th>\n",
68
+ " <th>CountyName</th>\n",
69
+ " <th>Rent (Smoothed) (Seasonally Adjusted)</th>\n",
70
+ " <th>City</th>\n",
71
+ " </tr>\n",
72
+ " </thead>\n",
73
+ " <tbody>\n",
74
+ " <tr>\n",
75
+ " <th>0</th>\n",
76
+ " <td>66</td>\n",
77
+ " <td>146</td>\n",
78
+ " <td>Ada County</td>\n",
79
+ " <td>county</td>\n",
80
+ " <td>ID</td>\n",
81
+ " <td>all homes plus multifamily</td>\n",
82
+ " <td>16.0</td>\n",
83
+ " <td>Boise City, ID</td>\n",
84
+ " <td>16.0</td>\n",
85
+ " <td>1.0</td>\n",
86
+ " <td>2015-01-31</td>\n",
87
+ " <td>927.493763</td>\n",
88
+ " <td>NaN</td>\n",
89
+ " <td>927.493763</td>\n",
90
+ " <td>NaN</td>\n",
91
+ " </tr>\n",
92
+ " <tr>\n",
93
+ " <th>1</th>\n",
94
+ " <td>66</td>\n",
95
+ " <td>146</td>\n",
96
+ " <td>Ada County</td>\n",
97
+ " <td>county</td>\n",
98
+ " <td>ID</td>\n",
99
+ " <td>all homes plus multifamily</td>\n",
100
+ " <td>16.0</td>\n",
101
+ " <td>Boise City, ID</td>\n",
102
+ " <td>16.0</td>\n",
103
+ " <td>1.0</td>\n",
104
+ " <td>2015-02-28</td>\n",
105
+ " <td>931.690623</td>\n",
106
+ " <td>NaN</td>\n",
107
+ " <td>931.690623</td>\n",
108
+ " <td>NaN</td>\n",
109
+ " </tr>\n",
110
+ " <tr>\n",
111
+ " <th>2</th>\n",
112
+ " <td>66</td>\n",
113
+ " <td>146</td>\n",
114
+ " <td>Ada County</td>\n",
115
+ " <td>county</td>\n",
116
+ " <td>ID</td>\n",
117
+ " <td>all homes plus multifamily</td>\n",
118
+ " <td>16.0</td>\n",
119
+ " <td>Boise City, ID</td>\n",
120
+ " <td>16.0</td>\n",
121
+ " <td>1.0</td>\n",
122
+ " <td>2015-03-31</td>\n",
123
+ " <td>932.568601</td>\n",
124
+ " <td>NaN</td>\n",
125
+ " <td>932.568601</td>\n",
126
+ " <td>NaN</td>\n",
127
+ " </tr>\n",
128
+ " <tr>\n",
129
+ " <th>3</th>\n",
130
+ " <td>66</td>\n",
131
+ " <td>146</td>\n",
132
+ " <td>Ada County</td>\n",
133
+ " <td>county</td>\n",
134
+ " <td>ID</td>\n",
135
+ " <td>all homes plus multifamily</td>\n",
136
+ " <td>16.0</td>\n",
137
+ " <td>Boise City, ID</td>\n",
138
+ " <td>16.0</td>\n",
139
+ " <td>1.0</td>\n",
140
+ " <td>2015-04-30</td>\n",
141
+ " <td>933.148134</td>\n",
142
+ " <td>NaN</td>\n",
143
+ " <td>933.148134</td>\n",
144
+ " <td>NaN</td>\n",
145
+ " </tr>\n",
146
+ " <tr>\n",
147
+ " <th>4</th>\n",
148
+ " <td>66</td>\n",
149
+ " <td>146</td>\n",
150
+ " <td>Ada County</td>\n",
151
+ " <td>county</td>\n",
152
+ " <td>ID</td>\n",
153
+ " <td>all homes plus multifamily</td>\n",
154
+ " <td>16.0</td>\n",
155
+ " <td>Boise City, ID</td>\n",
156
+ " <td>16.0</td>\n",
157
+ " <td>1.0</td>\n",
158
+ " <td>2015-05-31</td>\n",
159
+ " <td>941.045724</td>\n",
160
+ " <td>NaN</td>\n",
161
+ " <td>941.045724</td>\n",
162
+ " <td>NaN</td>\n",
163
+ " </tr>\n",
164
+ " <tr>\n",
165
+ " <th>...</th>\n",
166
+ " <td>...</td>\n",
167
+ " <td>...</td>\n",
168
+ " <td>...</td>\n",
169
+ " <td>...</td>\n",
170
+ " <td>...</td>\n",
171
+ " <td>...</td>\n",
172
+ " <td>...</td>\n",
173
+ " <td>...</td>\n",
174
+ " <td>...</td>\n",
175
+ " <td>...</td>\n",
176
+ " <td>...</td>\n",
177
+ " <td>...</td>\n",
178
+ " <td>...</td>\n",
179
+ " <td>...</td>\n",
180
+ " <td>...</td>\n",
181
+ " </tr>\n",
182
+ " <tr>\n",
183
+ " <th>1258735</th>\n",
184
+ " <td>857850</td>\n",
185
+ " <td>713</td>\n",
186
+ " <td>Cherry Hill</td>\n",
187
+ " <td>city</td>\n",
188
+ " <td>NJ</td>\n",
189
+ " <td>all homes plus multifamily</td>\n",
190
+ " <td>NJ</td>\n",
191
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
192
+ " <td>NaN</td>\n",
193
+ " <td>NaN</td>\n",
194
+ " <td>2023-08-31</td>\n",
195
+ " <td>2291.604800</td>\n",
196
+ " <td>Camden County</td>\n",
197
+ " <td>2244.961006</td>\n",
198
+ " <td>NaN</td>\n",
199
+ " </tr>\n",
200
+ " <tr>\n",
201
+ " <th>1258736</th>\n",
202
+ " <td>857850</td>\n",
203
+ " <td>713</td>\n",
204
+ " <td>Cherry Hill</td>\n",
205
+ " <td>city</td>\n",
206
+ " <td>NJ</td>\n",
207
+ " <td>all homes plus multifamily</td>\n",
208
+ " <td>NJ</td>\n",
209
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
210
+ " <td>NaN</td>\n",
211
+ " <td>NaN</td>\n",
212
+ " <td>2023-09-30</td>\n",
213
+ " <td>2296.188906</td>\n",
214
+ " <td>Camden County</td>\n",
215
+ " <td>2254.213172</td>\n",
216
+ " <td>NaN</td>\n",
217
+ " </tr>\n",
218
+ " <tr>\n",
219
+ " <th>1258737</th>\n",
220
+ " <td>857850</td>\n",
221
+ " <td>713</td>\n",
222
+ " <td>Cherry Hill</td>\n",
223
+ " <td>city</td>\n",
224
+ " <td>NJ</td>\n",
225
+ " <td>all homes plus multifamily</td>\n",
226
+ " <td>NJ</td>\n",
227
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
228
+ " <td>NaN</td>\n",
229
+ " <td>NaN</td>\n",
230
+ " <td>2023-10-31</td>\n",
231
+ " <td>2292.270938</td>\n",
232
+ " <td>Camden County</td>\n",
233
+ " <td>2261.540446</td>\n",
234
+ " <td>NaN</td>\n",
235
+ " </tr>\n",
236
+ " <tr>\n",
237
+ " <th>1258738</th>\n",
238
+ " <td>857850</td>\n",
239
+ " <td>713</td>\n",
240
+ " <td>Cherry Hill</td>\n",
241
+ " <td>city</td>\n",
242
+ " <td>NJ</td>\n",
243
+ " <td>all homes plus multifamily</td>\n",
244
+ " <td>NJ</td>\n",
245
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
246
+ " <td>NaN</td>\n",
247
+ " <td>NaN</td>\n",
248
+ " <td>2023-11-30</td>\n",
249
+ " <td>2253.417140</td>\n",
250
+ " <td>Camden County</td>\n",
251
+ " <td>2257.956024</td>\n",
252
+ " <td>NaN</td>\n",
253
+ " </tr>\n",
254
+ " <tr>\n",
255
+ " <th>1258739</th>\n",
256
+ " <td>857850</td>\n",
257
+ " <td>713</td>\n",
258
+ " <td>Cherry Hill</td>\n",
259
+ " <td>city</td>\n",
260
+ " <td>NJ</td>\n",
261
+ " <td>all homes plus multifamily</td>\n",
262
+ " <td>NJ</td>\n",
263
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
264
+ " <td>NaN</td>\n",
265
+ " <td>NaN</td>\n",
266
+ " <td>2023-12-31</td>\n",
267
+ " <td>2280.830303</td>\n",
268
+ " <td>Camden County</td>\n",
269
+ " <td>2280.830303</td>\n",
270
+ " <td>NaN</td>\n",
271
+ " </tr>\n",
272
+ " </tbody>\n",
273
+ "</table>\n",
274
+ "<p>1258740 rows Γ— 15 columns</p>\n",
275
+ "</div>"
276
+ ],
277
+ "text/plain": [
278
+ " RegionID SizeRank RegionName RegionType StateName \\\n",
279
+ "0 66 146 Ada County county ID \n",
280
+ "1 66 146 Ada County county ID \n",
281
+ "2 66 146 Ada County county ID \n",
282
+ "3 66 146 Ada County county ID \n",
283
+ "4 66 146 Ada County county ID \n",
284
+ "... ... ... ... ... ... \n",
285
+ "1258735 857850 713 Cherry Hill city NJ \n",
286
+ "1258736 857850 713 Cherry Hill city NJ \n",
287
+ "1258737 857850 713 Cherry Hill city NJ \n",
288
+ "1258738 857850 713 Cherry Hill city NJ \n",
289
+ "1258739 857850 713 Cherry Hill city NJ \n",
290
+ "\n",
291
+ " Home Type State \\\n",
292
+ "0 all homes plus multifamily 16.0 \n",
293
+ "1 all homes plus multifamily 16.0 \n",
294
+ "2 all homes plus multifamily 16.0 \n",
295
+ "3 all homes plus multifamily 16.0 \n",
296
+ "4 all homes plus multifamily 16.0 \n",
297
+ "... ... ... \n",
298
+ "1258735 all homes plus multifamily NJ \n",
299
+ "1258736 all homes plus multifamily NJ \n",
300
+ "1258737 all homes plus multifamily NJ \n",
301
+ "1258738 all homes plus multifamily NJ \n",
302
+ "1258739 all homes plus multifamily NJ \n",
303
+ "\n",
304
+ " Metro StateCodeFIPS \\\n",
305
+ "0 Boise City, ID 16.0 \n",
306
+ "1 Boise City, ID 16.0 \n",
307
+ "2 Boise City, ID 16.0 \n",
308
+ "3 Boise City, ID 16.0 \n",
309
+ "4 Boise City, ID 16.0 \n",
310
+ "... ... ... \n",
311
+ "1258735 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
312
+ "1258736 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
313
+ "1258737 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
314
+ "1258738 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
315
+ "1258739 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
316
+ "\n",
317
+ " MunicipalCodeFIPS Date Rent (Smoothed) CountyName \\\n",
318
+ "0 1.0 2015-01-31 927.493763 NaN \n",
319
+ "1 1.0 2015-02-28 931.690623 NaN \n",
320
+ "2 1.0 2015-03-31 932.568601 NaN \n",
321
+ "3 1.0 2015-04-30 933.148134 NaN \n",
322
+ "4 1.0 2015-05-31 941.045724 NaN \n",
323
+ "... ... ... ... ... \n",
324
+ "1258735 NaN 2023-08-31 2291.604800 Camden County \n",
325
+ "1258736 NaN 2023-09-30 2296.188906 Camden County \n",
326
+ "1258737 NaN 2023-10-31 2292.270938 Camden County \n",
327
+ "1258738 NaN 2023-11-30 2253.417140 Camden County \n",
328
+ "1258739 NaN 2023-12-31 2280.830303 Camden County \n",
329
+ "\n",
330
+ " Rent (Smoothed) (Seasonally Adjusted) City \n",
331
+ "0 927.493763 NaN \n",
332
+ "1 931.690623 NaN \n",
333
+ "2 932.568601 NaN \n",
334
+ "3 933.148134 NaN \n",
335
+ "4 941.045724 NaN \n",
336
+ "... ... ... \n",
337
+ "1258735 2244.961006 NaN \n",
338
+ "1258736 2254.213172 NaN \n",
339
+ "1258737 2261.540446 NaN \n",
340
+ "1258738 2257.956024 NaN \n",
341
+ "1258739 2280.830303 NaN \n",
342
+ "\n",
343
+ "[1258740 rows x 15 columns]"
344
+ ]
345
+ },
346
+ "execution_count": 3,
347
+ "metadata": {},
348
+ "output_type": "execute_result"
349
+ }
350
+ ],
351
+ "source": [
352
+ "data_frames = []\n",
353
+ "\n",
354
+ "slug_column_mappings = {\"\": \"Rent\"}\n",
355
+ "\n",
356
+ "data_dir_path = get_data_path_for_config(CONFIG_NAME)\n",
357
+ "\n",
358
+ "for filename in os.listdir(data_dir_path):\n",
359
+ " if filename.endswith(\".csv\"):\n",
360
+ " print(\"processing \" + filename)\n",
361
+ " cur_df = pd.read_csv(os.path.join(data_dir_path, filename))\n",
362
+ " exclude_columns = [\n",
363
+ " \"RegionID\",\n",
364
+ " \"SizeRank\",\n",
365
+ " \"RegionName\",\n",
366
+ " \"RegionType\",\n",
367
+ " \"StateName\",\n",
368
+ " \"Home Type\",\n",
369
+ " ]\n",
370
+ "\n",
371
+ " cur_df[\"RegionName\"] = cur_df[\"RegionName\"].astype(str)\n",
372
+ "\n",
373
+ " cur_df = set_home_type(cur_df, filename)\n",
374
+ "\n",
375
+ " if \"City\" in filename:\n",
376
+ " exclude_columns = [\n",
377
+ " \"RegionID\",\n",
378
+ " \"SizeRank\",\n",
379
+ " \"RegionName\",\n",
380
+ " \"RegionType\",\n",
381
+ " \"StateName\",\n",
382
+ " \"Home Type\",\n",
383
+ " # City Specific\n",
384
+ " \"State\",\n",
385
+ " \"Metro\",\n",
386
+ " \"CountyName\",\n",
387
+ " ]\n",
388
+ " elif \"Zip\" in filename:\n",
389
+ " exclude_columns = [\n",
390
+ " \"RegionID\",\n",
391
+ " \"SizeRank\",\n",
392
+ " \"RegionName\",\n",
393
+ " \"RegionType\",\n",
394
+ " \"StateName\",\n",
395
+ " \"Home Type\",\n",
396
+ " # Zip Specific\n",
397
+ " \"State\",\n",
398
+ " \"City\",\n",
399
+ " \"Metro\",\n",
400
+ " \"CountyName\",\n",
401
+ " ]\n",
402
+ " elif \"County\" in filename:\n",
403
+ " exclude_columns = [\n",
404
+ " \"RegionID\",\n",
405
+ " \"SizeRank\",\n",
406
+ " \"RegionName\",\n",
407
+ " \"RegionType\",\n",
408
+ " \"StateName\",\n",
409
+ " \"Home Type\",\n",
410
+ " # County Specific\n",
411
+ " \"State\",\n",
412
+ " \"Metro\",\n",
413
+ " \"StateCodeFIPS\",\n",
414
+ " \"MunicipalCodeFIPS\",\n",
415
+ " ]\n",
416
+ "\n",
417
+ " data_frames = handle_slug_column_mappings(\n",
418
+ " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
419
+ " )\n",
420
+ "\n",
421
+ "\n",
422
+ "combined_df = get_combined_df(\n",
423
+ " data_frames,\n",
424
+ " [\n",
425
+ " \"RegionID\",\n",
426
+ " \"SizeRank\",\n",
427
+ " \"RegionName\",\n",
428
+ " \"RegionType\",\n",
429
+ " \"StateName\",\n",
430
+ " \"Home Type\",\n",
431
+ " \"Date\",\n",
432
+ " ],\n",
433
+ ")\n",
434
+ "\n",
435
+ "combined_df"
436
+ ]
437
+ },
438
+ {
439
+ "cell_type": "code",
440
+ "execution_count": 4,
441
+ "metadata": {},
442
+ "outputs": [
443
+ {
444
+ "data": {
445
+ "text/html": [
446
+ "<div>\n",
447
+ "<style scoped>\n",
448
+ " .dataframe tbody tr th:only-of-type {\n",
449
+ " vertical-align: middle;\n",
450
+ " }\n",
451
+ "\n",
452
+ " .dataframe tbody tr th {\n",
453
+ " vertical-align: top;\n",
454
+ " }\n",
455
+ "\n",
456
+ " .dataframe thead th {\n",
457
+ " text-align: right;\n",
458
+ " }\n",
459
+ "</style>\n",
460
+ "<table border=\"1\" class=\"dataframe\">\n",
461
+ " <thead>\n",
462
+ " <tr style=\"text-align: right;\">\n",
463
+ " <th></th>\n",
464
+ " <th>RegionID</th>\n",
465
+ " <th>SizeRank</th>\n",
466
+ " <th>RegionName</th>\n",
467
+ " <th>RegionType</th>\n",
468
+ " <th>Home Type</th>\n",
469
+ " <th>State</th>\n",
470
+ " <th>Metro</th>\n",
471
+ " <th>StateCodeFIPS</th>\n",
472
+ " <th>MunicipalCodeFIPS</th>\n",
473
+ " <th>Date</th>\n",
474
+ " <th>Rent (Smoothed)</th>\n",
475
+ " <th>Rent (Smoothed) (Seasonally Adjusted)</th>\n",
476
+ " <th>City</th>\n",
477
+ " <th>County</th>\n",
478
+ " </tr>\n",
479
+ " </thead>\n",
480
+ " <tbody>\n",
481
+ " <tr>\n",
482
+ " <th>0</th>\n",
483
+ " <td>66</td>\n",
484
+ " <td>146</td>\n",
485
+ " <td>Ada County</td>\n",
486
+ " <td>county</td>\n",
487
+ " <td>all homes plus multifamily</td>\n",
488
+ " <td>Ada County</td>\n",
489
+ " <td>Boise City, ID</td>\n",
490
+ " <td>16.0</td>\n",
491
+ " <td>1.0</td>\n",
492
+ " <td>2015-01-31</td>\n",
493
+ " <td>927.493763</td>\n",
494
+ " <td>927.493763</td>\n",
495
+ " <td>NaN</td>\n",
496
+ " <td>Ada County</td>\n",
497
+ " </tr>\n",
498
+ " <tr>\n",
499
+ " <th>1</th>\n",
500
+ " <td>66</td>\n",
501
+ " <td>146</td>\n",
502
+ " <td>Ada County</td>\n",
503
+ " <td>county</td>\n",
504
+ " <td>all homes plus multifamily</td>\n",
505
+ " <td>Ada County</td>\n",
506
+ " <td>Boise City, ID</td>\n",
507
+ " <td>16.0</td>\n",
508
+ " <td>1.0</td>\n",
509
+ " <td>2015-02-28</td>\n",
510
+ " <td>931.690623</td>\n",
511
+ " <td>931.690623</td>\n",
512
+ " <td>NaN</td>\n",
513
+ " <td>Ada County</td>\n",
514
+ " </tr>\n",
515
+ " <tr>\n",
516
+ " <th>2</th>\n",
517
+ " <td>66</td>\n",
518
+ " <td>146</td>\n",
519
+ " <td>Ada County</td>\n",
520
+ " <td>county</td>\n",
521
+ " <td>all homes plus multifamily</td>\n",
522
+ " <td>Ada County</td>\n",
523
+ " <td>Boise City, ID</td>\n",
524
+ " <td>16.0</td>\n",
525
+ " <td>1.0</td>\n",
526
+ " <td>2015-03-31</td>\n",
527
+ " <td>932.568601</td>\n",
528
+ " <td>932.568601</td>\n",
529
+ " <td>NaN</td>\n",
530
+ " <td>Ada County</td>\n",
531
+ " </tr>\n",
532
+ " <tr>\n",
533
+ " <th>3</th>\n",
534
+ " <td>66</td>\n",
535
+ " <td>146</td>\n",
536
+ " <td>Ada County</td>\n",
537
+ " <td>county</td>\n",
538
+ " <td>all homes plus multifamily</td>\n",
539
+ " <td>Ada County</td>\n",
540
+ " <td>Boise City, ID</td>\n",
541
+ " <td>16.0</td>\n",
542
+ " <td>1.0</td>\n",
543
+ " <td>2015-04-30</td>\n",
544
+ " <td>933.148134</td>\n",
545
+ " <td>933.148134</td>\n",
546
+ " <td>NaN</td>\n",
547
+ " <td>Ada County</td>\n",
548
+ " </tr>\n",
549
+ " <tr>\n",
550
+ " <th>4</th>\n",
551
+ " <td>66</td>\n",
552
+ " <td>146</td>\n",
553
+ " <td>Ada County</td>\n",
554
+ " <td>county</td>\n",
555
+ " <td>all homes plus multifamily</td>\n",
556
+ " <td>Ada County</td>\n",
557
+ " <td>Boise City, ID</td>\n",
558
+ " <td>16.0</td>\n",
559
+ " <td>1.0</td>\n",
560
+ " <td>2015-05-31</td>\n",
561
+ " <td>941.045724</td>\n",
562
+ " <td>941.045724</td>\n",
563
+ " <td>NaN</td>\n",
564
+ " <td>Ada County</td>\n",
565
+ " </tr>\n",
566
+ " <tr>\n",
567
+ " <th>...</th>\n",
568
+ " <td>...</td>\n",
569
+ " <td>...</td>\n",
570
+ " <td>...</td>\n",
571
+ " <td>...</td>\n",
572
+ " <td>...</td>\n",
573
+ " <td>...</td>\n",
574
+ " <td>...</td>\n",
575
+ " <td>...</td>\n",
576
+ " <td>...</td>\n",
577
+ " <td>...</td>\n",
578
+ " <td>...</td>\n",
579
+ " <td>...</td>\n",
580
+ " <td>...</td>\n",
581
+ " <td>...</td>\n",
582
+ " </tr>\n",
583
+ " <tr>\n",
584
+ " <th>1258735</th>\n",
585
+ " <td>857850</td>\n",
586
+ " <td>713</td>\n",
587
+ " <td>Cherry Hill</td>\n",
588
+ " <td>city</td>\n",
589
+ " <td>all homes plus multifamily</td>\n",
590
+ " <td>Camden County</td>\n",
591
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
592
+ " <td>NaN</td>\n",
593
+ " <td>NaN</td>\n",
594
+ " <td>2023-08-31</td>\n",
595
+ " <td>2291.604800</td>\n",
596
+ " <td>2244.961006</td>\n",
597
+ " <td>Cherry Hill</td>\n",
598
+ " <td>NaN</td>\n",
599
+ " </tr>\n",
600
+ " <tr>\n",
601
+ " <th>1258736</th>\n",
602
+ " <td>857850</td>\n",
603
+ " <td>713</td>\n",
604
+ " <td>Cherry Hill</td>\n",
605
+ " <td>city</td>\n",
606
+ " <td>all homes plus multifamily</td>\n",
607
+ " <td>Camden County</td>\n",
608
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
609
+ " <td>NaN</td>\n",
610
+ " <td>NaN</td>\n",
611
+ " <td>2023-09-30</td>\n",
612
+ " <td>2296.188906</td>\n",
613
+ " <td>2254.213172</td>\n",
614
+ " <td>Cherry Hill</td>\n",
615
+ " <td>NaN</td>\n",
616
+ " </tr>\n",
617
+ " <tr>\n",
618
+ " <th>1258737</th>\n",
619
+ " <td>857850</td>\n",
620
+ " <td>713</td>\n",
621
+ " <td>Cherry Hill</td>\n",
622
+ " <td>city</td>\n",
623
+ " <td>all homes plus multifamily</td>\n",
624
+ " <td>Camden County</td>\n",
625
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
626
+ " <td>NaN</td>\n",
627
+ " <td>NaN</td>\n",
628
+ " <td>2023-10-31</td>\n",
629
+ " <td>2292.270938</td>\n",
630
+ " <td>2261.540446</td>\n",
631
+ " <td>Cherry Hill</td>\n",
632
+ " <td>NaN</td>\n",
633
+ " </tr>\n",
634
+ " <tr>\n",
635
+ " <th>1258738</th>\n",
636
+ " <td>857850</td>\n",
637
+ " <td>713</td>\n",
638
+ " <td>Cherry Hill</td>\n",
639
+ " <td>city</td>\n",
640
+ " <td>all homes plus multifamily</td>\n",
641
+ " <td>Camden County</td>\n",
642
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
643
+ " <td>NaN</td>\n",
644
+ " <td>NaN</td>\n",
645
+ " <td>2023-11-30</td>\n",
646
+ " <td>2253.417140</td>\n",
647
+ " <td>2257.956024</td>\n",
648
+ " <td>Cherry Hill</td>\n",
649
+ " <td>NaN</td>\n",
650
+ " </tr>\n",
651
+ " <tr>\n",
652
+ " <th>1258739</th>\n",
653
+ " <td>857850</td>\n",
654
+ " <td>713</td>\n",
655
+ " <td>Cherry Hill</td>\n",
656
+ " <td>city</td>\n",
657
+ " <td>all homes plus multifamily</td>\n",
658
+ " <td>Camden County</td>\n",
659
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
660
+ " <td>NaN</td>\n",
661
+ " <td>NaN</td>\n",
662
+ " <td>2023-12-31</td>\n",
663
+ " <td>2280.830303</td>\n",
664
+ " <td>2280.830303</td>\n",
665
+ " <td>Cherry Hill</td>\n",
666
+ " <td>NaN</td>\n",
667
+ " </tr>\n",
668
+ " </tbody>\n",
669
+ "</table>\n",
670
+ "<p>1258740 rows Γ— 14 columns</p>\n",
671
+ "</div>"
672
+ ],
673
+ "text/plain": [
674
+ " RegionID SizeRank RegionName RegionType \\\n",
675
+ "0 66 146 Ada County county \n",
676
+ "1 66 146 Ada County county \n",
677
+ "2 66 146 Ada County county \n",
678
+ "3 66 146 Ada County county \n",
679
+ "4 66 146 Ada County county \n",
680
+ "... ... ... ... ... \n",
681
+ "1258735 857850 713 Cherry Hill city \n",
682
+ "1258736 857850 713 Cherry Hill city \n",
683
+ "1258737 857850 713 Cherry Hill city \n",
684
+ "1258738 857850 713 Cherry Hill city \n",
685
+ "1258739 857850 713 Cherry Hill city \n",
686
+ "\n",
687
+ " Home Type State \\\n",
688
+ "0 all homes plus multifamily Ada County \n",
689
+ "1 all homes plus multifamily Ada County \n",
690
+ "2 all homes plus multifamily Ada County \n",
691
+ "3 all homes plus multifamily Ada County \n",
692
+ "4 all homes plus multifamily Ada County \n",
693
+ "... ... ... \n",
694
+ "1258735 all homes plus multifamily Camden County \n",
695
+ "1258736 all homes plus multifamily Camden County \n",
696
+ "1258737 all homes plus multifamily Camden County \n",
697
+ "1258738 all homes plus multifamily Camden County \n",
698
+ "1258739 all homes plus multifamily Camden County \n",
699
+ "\n",
700
+ " Metro StateCodeFIPS \\\n",
701
+ "0 Boise City, ID 16.0 \n",
702
+ "1 Boise City, ID 16.0 \n",
703
+ "2 Boise City, ID 16.0 \n",
704
+ "3 Boise City, ID 16.0 \n",
705
+ "4 Boise City, ID 16.0 \n",
706
+ "... ... ... \n",
707
+ "1258735 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
708
+ "1258736 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
709
+ "1258737 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
710
+ "1258738 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
711
+ "1258739 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
712
+ "\n",
713
+ " MunicipalCodeFIPS Date Rent (Smoothed) \\\n",
714
+ "0 1.0 2015-01-31 927.493763 \n",
715
+ "1 1.0 2015-02-28 931.690623 \n",
716
+ "2 1.0 2015-03-31 932.568601 \n",
717
+ "3 1.0 2015-04-30 933.148134 \n",
718
+ "4 1.0 2015-05-31 941.045724 \n",
719
+ "... ... ... ... \n",
720
+ "1258735 NaN 2023-08-31 2291.604800 \n",
721
+ "1258736 NaN 2023-09-30 2296.188906 \n",
722
+ "1258737 NaN 2023-10-31 2292.270938 \n",
723
+ "1258738 NaN 2023-11-30 2253.417140 \n",
724
+ "1258739 NaN 2023-12-31 2280.830303 \n",
725
+ "\n",
726
+ " Rent (Smoothed) (Seasonally Adjusted) City County \n",
727
+ "0 927.493763 NaN Ada County \n",
728
+ "1 931.690623 NaN Ada County \n",
729
+ "2 932.568601 NaN Ada County \n",
730
+ "3 933.148134 NaN Ada County \n",
731
+ "4 941.045724 NaN Ada County \n",
732
+ "... ... ... ... \n",
733
+ "1258735 2244.961006 Cherry Hill NaN \n",
734
+ "1258736 2254.213172 Cherry Hill NaN \n",
735
+ "1258737 2261.540446 Cherry Hill NaN \n",
736
+ "1258738 2257.956024 Cherry Hill NaN \n",
737
+ "1258739 2280.830303 Cherry Hill NaN \n",
738
+ "\n",
739
+ "[1258740 rows x 14 columns]"
740
+ ]
741
+ },
742
+ "execution_count": 4,
743
+ "metadata": {},
744
+ "output_type": "execute_result"
745
+ }
746
+ ],
747
+ "source": [
748
+ "final_df = combined_df\n",
749
+ "\n",
750
+ "for index, row in final_df.iterrows():\n",
751
+ " if row[\"RegionType\"] == \"city\":\n",
752
+ " final_df.at[index, \"City\"] = row[\"RegionName\"]\n",
753
+ " elif row[\"RegionType\"] == \"county\":\n",
754
+ " final_df.at[index, \"County\"] = row[\"RegionName\"]\n",
755
+ "\n",
756
+ "# coalesce State and StateName columns\n",
757
+ "final_df[\"State\"] = final_df[\"State\"].combine_first(final_df[\"StateName\"])\n",
758
+ "final_df[\"State\"] = final_df[\"County\"].combine_first(final_df[\"CountyName\"])\n",
759
+ "\n",
760
+ "final_df = final_df.drop(columns=[\"StateName\", \"CountyName\"])\n",
761
+ "final_df"
762
+ ]
763
+ },
764
+ {
765
+ "cell_type": "code",
766
+ "execution_count": 5,
767
+ "metadata": {},
768
+ "outputs": [
769
+ {
770
+ "data": {
771
+ "text/html": [
772
+ "<div>\n",
773
+ "<style scoped>\n",
774
+ " .dataframe tbody tr th:only-of-type {\n",
775
+ " vertical-align: middle;\n",
776
+ " }\n",
777
+ "\n",
778
+ " .dataframe tbody tr th {\n",
779
+ " vertical-align: top;\n",
780
+ " }\n",
781
+ "\n",
782
+ " .dataframe thead th {\n",
783
+ " text-align: right;\n",
784
+ " }\n",
785
+ "</style>\n",
786
+ "<table border=\"1\" class=\"dataframe\">\n",
787
+ " <thead>\n",
788
+ " <tr style=\"text-align: right;\">\n",
789
+ " <th></th>\n",
790
+ " <th>Region ID</th>\n",
791
+ " <th>Size Rank</th>\n",
792
+ " <th>Region</th>\n",
793
+ " <th>Region Type</th>\n",
794
+ " <th>Home Type</th>\n",
795
+ " <th>State</th>\n",
796
+ " <th>Metro</th>\n",
797
+ " <th>State Code FIPS</th>\n",
798
+ " <th>Municipal Code FIPS</th>\n",
799
+ " <th>Date</th>\n",
800
+ " <th>Rent (Smoothed)</th>\n",
801
+ " <th>Rent (Smoothed) (Seasonally Adjusted)</th>\n",
802
+ " <th>City</th>\n",
803
+ " <th>County</th>\n",
804
+ " </tr>\n",
805
+ " </thead>\n",
806
+ " <tbody>\n",
807
+ " <tr>\n",
808
+ " <th>0</th>\n",
809
+ " <td>66</td>\n",
810
+ " <td>146</td>\n",
811
+ " <td>Ada County</td>\n",
812
+ " <td>county</td>\n",
813
+ " <td>all homes plus multifamily</td>\n",
814
+ " <td>Ada County</td>\n",
815
+ " <td>Boise City, ID</td>\n",
816
+ " <td>16.0</td>\n",
817
+ " <td>1.0</td>\n",
818
+ " <td>2015-01-31</td>\n",
819
+ " <td>927.493763</td>\n",
820
+ " <td>927.493763</td>\n",
821
+ " <td>NaN</td>\n",
822
+ " <td>Ada County</td>\n",
823
+ " </tr>\n",
824
+ " <tr>\n",
825
+ " <th>1</th>\n",
826
+ " <td>66</td>\n",
827
+ " <td>146</td>\n",
828
+ " <td>Ada County</td>\n",
829
+ " <td>county</td>\n",
830
+ " <td>all homes plus multifamily</td>\n",
831
+ " <td>Ada County</td>\n",
832
+ " <td>Boise City, ID</td>\n",
833
+ " <td>16.0</td>\n",
834
+ " <td>1.0</td>\n",
835
+ " <td>2015-02-28</td>\n",
836
+ " <td>931.690623</td>\n",
837
+ " <td>931.690623</td>\n",
838
+ " <td>NaN</td>\n",
839
+ " <td>Ada County</td>\n",
840
+ " </tr>\n",
841
+ " <tr>\n",
842
+ " <th>2</th>\n",
843
+ " <td>66</td>\n",
844
+ " <td>146</td>\n",
845
+ " <td>Ada County</td>\n",
846
+ " <td>county</td>\n",
847
+ " <td>all homes plus multifamily</td>\n",
848
+ " <td>Ada County</td>\n",
849
+ " <td>Boise City, ID</td>\n",
850
+ " <td>16.0</td>\n",
851
+ " <td>1.0</td>\n",
852
+ " <td>2015-03-31</td>\n",
853
+ " <td>932.568601</td>\n",
854
+ " <td>932.568601</td>\n",
855
+ " <td>NaN</td>\n",
856
+ " <td>Ada County</td>\n",
857
+ " </tr>\n",
858
+ " <tr>\n",
859
+ " <th>3</th>\n",
860
+ " <td>66</td>\n",
861
+ " <td>146</td>\n",
862
+ " <td>Ada County</td>\n",
863
+ " <td>county</td>\n",
864
+ " <td>all homes plus multifamily</td>\n",
865
+ " <td>Ada County</td>\n",
866
+ " <td>Boise City, ID</td>\n",
867
+ " <td>16.0</td>\n",
868
+ " <td>1.0</td>\n",
869
+ " <td>2015-04-30</td>\n",
870
+ " <td>933.148134</td>\n",
871
+ " <td>933.148134</td>\n",
872
+ " <td>NaN</td>\n",
873
+ " <td>Ada County</td>\n",
874
+ " </tr>\n",
875
+ " <tr>\n",
876
+ " <th>4</th>\n",
877
+ " <td>66</td>\n",
878
+ " <td>146</td>\n",
879
+ " <td>Ada County</td>\n",
880
+ " <td>county</td>\n",
881
+ " <td>all homes plus multifamily</td>\n",
882
+ " <td>Ada County</td>\n",
883
+ " <td>Boise City, ID</td>\n",
884
+ " <td>16.0</td>\n",
885
+ " <td>1.0</td>\n",
886
+ " <td>2015-05-31</td>\n",
887
+ " <td>941.045724</td>\n",
888
+ " <td>941.045724</td>\n",
889
+ " <td>NaN</td>\n",
890
+ " <td>Ada County</td>\n",
891
+ " </tr>\n",
892
+ " <tr>\n",
893
+ " <th>...</th>\n",
894
+ " <td>...</td>\n",
895
+ " <td>...</td>\n",
896
+ " <td>...</td>\n",
897
+ " <td>...</td>\n",
898
+ " <td>...</td>\n",
899
+ " <td>...</td>\n",
900
+ " <td>...</td>\n",
901
+ " <td>...</td>\n",
902
+ " <td>...</td>\n",
903
+ " <td>...</td>\n",
904
+ " <td>...</td>\n",
905
+ " <td>...</td>\n",
906
+ " <td>...</td>\n",
907
+ " <td>...</td>\n",
908
+ " </tr>\n",
909
+ " <tr>\n",
910
+ " <th>1258735</th>\n",
911
+ " <td>857850</td>\n",
912
+ " <td>713</td>\n",
913
+ " <td>Cherry Hill</td>\n",
914
+ " <td>city</td>\n",
915
+ " <td>all homes plus multifamily</td>\n",
916
+ " <td>Camden County</td>\n",
917
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
918
+ " <td>NaN</td>\n",
919
+ " <td>NaN</td>\n",
920
+ " <td>2023-08-31</td>\n",
921
+ " <td>2291.604800</td>\n",
922
+ " <td>2244.961006</td>\n",
923
+ " <td>Cherry Hill</td>\n",
924
+ " <td>NaN</td>\n",
925
+ " </tr>\n",
926
+ " <tr>\n",
927
+ " <th>1258736</th>\n",
928
+ " <td>857850</td>\n",
929
+ " <td>713</td>\n",
930
+ " <td>Cherry Hill</td>\n",
931
+ " <td>city</td>\n",
932
+ " <td>all homes plus multifamily</td>\n",
933
+ " <td>Camden County</td>\n",
934
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
935
+ " <td>NaN</td>\n",
936
+ " <td>NaN</td>\n",
937
+ " <td>2023-09-30</td>\n",
938
+ " <td>2296.188906</td>\n",
939
+ " <td>2254.213172</td>\n",
940
+ " <td>Cherry Hill</td>\n",
941
+ " <td>NaN</td>\n",
942
+ " </tr>\n",
943
+ " <tr>\n",
944
+ " <th>1258737</th>\n",
945
+ " <td>857850</td>\n",
946
+ " <td>713</td>\n",
947
+ " <td>Cherry Hill</td>\n",
948
+ " <td>city</td>\n",
949
+ " <td>all homes plus multifamily</td>\n",
950
+ " <td>Camden County</td>\n",
951
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
952
+ " <td>NaN</td>\n",
953
+ " <td>NaN</td>\n",
954
+ " <td>2023-10-31</td>\n",
955
+ " <td>2292.270938</td>\n",
956
+ " <td>2261.540446</td>\n",
957
+ " <td>Cherry Hill</td>\n",
958
+ " <td>NaN</td>\n",
959
+ " </tr>\n",
960
+ " <tr>\n",
961
+ " <th>1258738</th>\n",
962
+ " <td>857850</td>\n",
963
+ " <td>713</td>\n",
964
+ " <td>Cherry Hill</td>\n",
965
+ " <td>city</td>\n",
966
+ " <td>all homes plus multifamily</td>\n",
967
+ " <td>Camden County</td>\n",
968
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
969
+ " <td>NaN</td>\n",
970
+ " <td>NaN</td>\n",
971
+ " <td>2023-11-30</td>\n",
972
+ " <td>2253.417140</td>\n",
973
+ " <td>2257.956024</td>\n",
974
+ " <td>Cherry Hill</td>\n",
975
+ " <td>NaN</td>\n",
976
+ " </tr>\n",
977
+ " <tr>\n",
978
+ " <th>1258739</th>\n",
979
+ " <td>857850</td>\n",
980
+ " <td>713</td>\n",
981
+ " <td>Cherry Hill</td>\n",
982
+ " <td>city</td>\n",
983
+ " <td>all homes plus multifamily</td>\n",
984
+ " <td>Camden County</td>\n",
985
+ " <td>Philadelphia-Camden-Wilmington, PA-NJ-DE-MD</td>\n",
986
+ " <td>NaN</td>\n",
987
+ " <td>NaN</td>\n",
988
+ " <td>2023-12-31</td>\n",
989
+ " <td>2280.830303</td>\n",
990
+ " <td>2280.830303</td>\n",
991
+ " <td>Cherry Hill</td>\n",
992
+ " <td>NaN</td>\n",
993
+ " </tr>\n",
994
+ " </tbody>\n",
995
+ "</table>\n",
996
+ "<p>1258740 rows Γ— 14 columns</p>\n",
997
+ "</div>"
998
+ ],
999
+ "text/plain": [
1000
+ " Region ID Size Rank Region Region Type \\\n",
1001
+ "0 66 146 Ada County county \n",
1002
+ "1 66 146 Ada County county \n",
1003
+ "2 66 146 Ada County county \n",
1004
+ "3 66 146 Ada County county \n",
1005
+ "4 66 146 Ada County county \n",
1006
+ "... ... ... ... ... \n",
1007
+ "1258735 857850 713 Cherry Hill city \n",
1008
+ "1258736 857850 713 Cherry Hill city \n",
1009
+ "1258737 857850 713 Cherry Hill city \n",
1010
+ "1258738 857850 713 Cherry Hill city \n",
1011
+ "1258739 857850 713 Cherry Hill city \n",
1012
+ "\n",
1013
+ " Home Type State \\\n",
1014
+ "0 all homes plus multifamily Ada County \n",
1015
+ "1 all homes plus multifamily Ada County \n",
1016
+ "2 all homes plus multifamily Ada County \n",
1017
+ "3 all homes plus multifamily Ada County \n",
1018
+ "4 all homes plus multifamily Ada County \n",
1019
+ "... ... ... \n",
1020
+ "1258735 all homes plus multifamily Camden County \n",
1021
+ "1258736 all homes plus multifamily Camden County \n",
1022
+ "1258737 all homes plus multifamily Camden County \n",
1023
+ "1258738 all homes plus multifamily Camden County \n",
1024
+ "1258739 all homes plus multifamily Camden County \n",
1025
+ "\n",
1026
+ " Metro State Code FIPS \\\n",
1027
+ "0 Boise City, ID 16.0 \n",
1028
+ "1 Boise City, ID 16.0 \n",
1029
+ "2 Boise City, ID 16.0 \n",
1030
+ "3 Boise City, ID 16.0 \n",
1031
+ "4 Boise City, ID 16.0 \n",
1032
+ "... ... ... \n",
1033
+ "1258735 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
1034
+ "1258736 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
1035
+ "1258737 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
1036
+ "1258738 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
1037
+ "1258739 Philadelphia-Camden-Wilmington, PA-NJ-DE-MD NaN \n",
1038
+ "\n",
1039
+ " Municipal Code FIPS Date Rent (Smoothed) \\\n",
1040
+ "0 1.0 2015-01-31 927.493763 \n",
1041
+ "1 1.0 2015-02-28 931.690623 \n",
1042
+ "2 1.0 2015-03-31 932.568601 \n",
1043
+ "3 1.0 2015-04-30 933.148134 \n",
1044
+ "4 1.0 2015-05-31 941.045724 \n",
1045
+ "... ... ... ... \n",
1046
+ "1258735 NaN 2023-08-31 2291.604800 \n",
1047
+ "1258736 NaN 2023-09-30 2296.188906 \n",
1048
+ "1258737 NaN 2023-10-31 2292.270938 \n",
1049
+ "1258738 NaN 2023-11-30 2253.417140 \n",
1050
+ "1258739 NaN 2023-12-31 2280.830303 \n",
1051
+ "\n",
1052
+ " Rent (Smoothed) (Seasonally Adjusted) City County \n",
1053
+ "0 927.493763 NaN Ada County \n",
1054
+ "1 931.690623 NaN Ada County \n",
1055
+ "2 932.568601 NaN Ada County \n",
1056
+ "3 933.148134 NaN Ada County \n",
1057
+ "4 941.045724 NaN Ada County \n",
1058
+ "... ... ... ... \n",
1059
+ "1258735 2244.961006 Cherry Hill NaN \n",
1060
+ "1258736 2254.213172 Cherry Hill NaN \n",
1061
+ "1258737 2261.540446 Cherry Hill NaN \n",
1062
+ "1258738 2257.956024 Cherry Hill NaN \n",
1063
+ "1258739 2280.830303 Cherry Hill NaN \n",
1064
+ "\n",
1065
+ "[1258740 rows x 14 columns]"
1066
+ ]
1067
+ },
1068
+ "execution_count": 5,
1069
+ "metadata": {},
1070
+ "output_type": "execute_result"
1071
+ }
1072
+ ],
1073
+ "source": [
1074
+ "# Adjust column names\n",
1075
+ "final_df = final_df.rename(\n",
1076
+ " columns={\n",
1077
+ " \"RegionID\": \"Region ID\",\n",
1078
+ " \"SizeRank\": \"Size Rank\",\n",
1079
+ " \"RegionName\": \"Region\",\n",
1080
+ " \"RegionType\": \"Region Type\",\n",
1081
+ " \"StateCodeFIPS\": \"State Code FIPS\",\n",
1082
+ " \"MunicipalCodeFIPS\": \"Municipal Code FIPS\",\n",
1083
+ " }\n",
1084
+ ")\n",
1085
+ "\n",
1086
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
1087
+ "\n",
1088
+ "final_df"
1089
+ ]
1090
+ },
1091
+ {
1092
+ "cell_type": "code",
1093
+ "execution_count": 6,
1094
+ "metadata": {},
1095
+ "outputs": [],
1096
+ "source": [
1097
+ "save_final_df_as_jsonl(CONFIG_NAME, final_df)"
1098
+ ]
1099
+ }
1100
+ ],
1101
+ "metadata": {
1102
+ "kernelspec": {
1103
+ "display_name": "Python 3",
1104
+ "language": "python",
1105
+ "name": "python3"
1106
+ },
1107
+ "language_info": {
1108
+ "codemirror_mode": {
1109
+ "name": "ipython",
1110
+ "version": 3
1111
+ },
1112
+ "file_extension": ".py",
1113
+ "mimetype": "text/x-python",
1114
+ "name": "python",
1115
+ "nbconvert_exporter": "python",
1116
+ "pygments_lexer": "ipython3",
1117
+ "version": "3.12.2"
1118
+ }
1119
+ },
1120
+ "nbformat": 4,
1121
+ "nbformat_minor": 2
1122
+ }
processors/rentals.py ADDED
@@ -0,0 +1,157 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[2]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import (
11
+ get_data_path_for_config,
12
+ get_combined_df,
13
+ save_final_df_as_jsonl,
14
+ handle_slug_column_mappings,
15
+ set_home_type,
16
+ )
17
+
18
+
19
+ # In[1]:
20
+
21
+
22
+ CONFIG_NAME = "rentals"
23
+
24
+
25
+ # In[3]:
26
+
27
+
28
+ data_frames = []
29
+
30
+ slug_column_mappings = {"": "Rent"}
31
+
32
+ data_dir_path = get_data_path_for_config(CONFIG_NAME)
33
+
34
+ for filename in os.listdir(data_dir_path):
35
+ if filename.endswith(".csv"):
36
+ print("processing " + filename)
37
+ cur_df = pd.read_csv(os.path.join(data_dir_path, filename))
38
+ exclude_columns = [
39
+ "RegionID",
40
+ "SizeRank",
41
+ "RegionName",
42
+ "RegionType",
43
+ "StateName",
44
+ "Home Type",
45
+ ]
46
+
47
+ cur_df["RegionName"] = cur_df["RegionName"].astype(str)
48
+
49
+ cur_df = set_home_type(cur_df, filename)
50
+
51
+ if "City" in filename:
52
+ exclude_columns = [
53
+ "RegionID",
54
+ "SizeRank",
55
+ "RegionName",
56
+ "RegionType",
57
+ "StateName",
58
+ "Home Type",
59
+ # City Specific
60
+ "State",
61
+ "Metro",
62
+ "CountyName",
63
+ ]
64
+ elif "Zip" in filename:
65
+ exclude_columns = [
66
+ "RegionID",
67
+ "SizeRank",
68
+ "RegionName",
69
+ "RegionType",
70
+ "StateName",
71
+ "Home Type",
72
+ # Zip Specific
73
+ "State",
74
+ "City",
75
+ "Metro",
76
+ "CountyName",
77
+ ]
78
+ elif "County" in filename:
79
+ exclude_columns = [
80
+ "RegionID",
81
+ "SizeRank",
82
+ "RegionName",
83
+ "RegionType",
84
+ "StateName",
85
+ "Home Type",
86
+ # County Specific
87
+ "State",
88
+ "Metro",
89
+ "StateCodeFIPS",
90
+ "MunicipalCodeFIPS",
91
+ ]
92
+
93
+ data_frames = handle_slug_column_mappings(
94
+ data_frames, slug_column_mappings, exclude_columns, filename, cur_df
95
+ )
96
+
97
+
98
+ combined_df = get_combined_df(
99
+ data_frames,
100
+ [
101
+ "RegionID",
102
+ "SizeRank",
103
+ "RegionName",
104
+ "RegionType",
105
+ "StateName",
106
+ "Home Type",
107
+ "Date",
108
+ ],
109
+ )
110
+
111
+ combined_df
112
+
113
+
114
+ # In[4]:
115
+
116
+
117
+ final_df = combined_df
118
+
119
+ for index, row in final_df.iterrows():
120
+ if row["RegionType"] == "city":
121
+ final_df.at[index, "City"] = row["RegionName"]
122
+ elif row["RegionType"] == "county":
123
+ final_df.at[index, "County"] = row["RegionName"]
124
+
125
+ # coalesce State and StateName columns
126
+ final_df["State"] = final_df["State"].combine_first(final_df["StateName"])
127
+ final_df["State"] = final_df["County"].combine_first(final_df["CountyName"])
128
+
129
+ final_df = final_df.drop(columns=["StateName", "CountyName"])
130
+ final_df
131
+
132
+
133
+ # In[5]:
134
+
135
+
136
+ # Adjust column names
137
+ final_df = final_df.rename(
138
+ columns={
139
+ "RegionID": "Region ID",
140
+ "SizeRank": "Size Rank",
141
+ "RegionName": "Region",
142
+ "RegionType": "Region Type",
143
+ "StateCodeFIPS": "State Code FIPS",
144
+ "MunicipalCodeFIPS": "Municipal Code FIPS",
145
+ }
146
+ )
147
+
148
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
149
+
150
+ final_df
151
+
152
+
153
+ # In[7]:
154
+
155
+
156
+ save_final_df_as_jsonl(CONFIG_NAME, final_df)
157
+
processors/sales.ipynb ADDED
@@ -0,0 +1,1321 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import pandas as pd\n",
10
+ "import os\n",
11
+ "\n",
12
+ "from helpers import (\n",
13
+ " get_data_path_for_config,\n",
14
+ " get_combined_df,\n",
15
+ " save_final_df_as_jsonl,\n",
16
+ " handle_slug_column_mappings,\n",
17
+ " set_home_type,\n",
18
+ ")"
19
+ ]
20
+ },
21
+ {
22
+ "cell_type": "code",
23
+ "execution_count": 2,
24
+ "metadata": {},
25
+ "outputs": [],
26
+ "source": [
27
+ "CONFIG_NAME = \"sales\""
28
+ ]
29
+ },
30
+ {
31
+ "cell_type": "code",
32
+ "execution_count": 3,
33
+ "metadata": {},
34
+ "outputs": [
35
+ {
36
+ "name": "stdout",
37
+ "output_type": "stream",
38
+ "text": [
39
+ "processing Metro_mean_sale_to_list_uc_sfrcondo_sm_month.csv\n",
40
+ "processing Metro_median_sale_to_list_uc_sfrcondo_week.csv\n",
41
+ "processing Metro_median_sale_price_uc_sfr_week.csv\n",
42
+ "processing Metro_pct_sold_below_list_uc_sfrcondo_sm_month.csv\n",
43
+ "processing Metro_median_sale_price_uc_sfr_sm_sa_week.csv\n",
44
+ "processing Metro_pct_sold_below_list_uc_sfrcondo_month.csv\n",
45
+ "processing Metro_median_sale_price_uc_sfrcondo_sm_week.csv\n",
46
+ "processing Metro_pct_sold_below_list_uc_sfrcondo_sm_week.csv\n",
47
+ "processing Metro_median_sale_price_uc_sfr_month.csv\n",
48
+ "processing Metro_median_sale_to_list_uc_sfrcondo_sm_month.csv\n",
49
+ "processing Metro_pct_sold_above_list_uc_sfrcondo_month.csv\n",
50
+ "processing Metro_median_sale_to_list_uc_sfrcondo_sm_week.csv\n",
51
+ "processing Metro_median_sale_price_uc_sfrcondo_sm_sa_month.csv\n",
52
+ "processing Metro_sales_count_now_uc_sfrcondo_month.csv\n",
53
+ "processing Metro_pct_sold_above_list_uc_sfrcondo_week.csv\n",
54
+ "processing Metro_mean_sale_to_list_uc_sfrcondo_sm_week.csv\n",
55
+ "processing Metro_median_sale_price_uc_sfrcondo_sm_month.csv\n",
56
+ "processing Metro_mean_sale_to_list_uc_sfrcondo_week.csv\n",
57
+ "processing Metro_median_sale_price_uc_sfr_sm_month.csv\n",
58
+ "processing Metro_median_sale_to_list_uc_sfrcondo_month.csv\n",
59
+ "processing Metro_median_sale_price_uc_sfrcondo_sm_sa_week.csv\n",
60
+ "processing Metro_pct_sold_below_list_uc_sfrcondo_week.csv\n",
61
+ "processing Metro_median_sale_price_uc_sfrcondo_week.csv\n",
62
+ "processing Metro_mean_sale_to_list_uc_sfrcondo_month.csv\n",
63
+ "processing Metro_pct_sold_above_list_uc_sfrcondo_sm_week.csv\n",
64
+ "processing Metro_median_sale_price_uc_sfr_sm_week.csv\n",
65
+ "processing Metro_median_sale_price_uc_sfrcondo_month.csv\n",
66
+ "processing Metro_pct_sold_above_list_uc_sfrcondo_sm_month.csv\n"
67
+ ]
68
+ },
69
+ {
70
+ "data": {
71
+ "text/html": [
72
+ "<div>\n",
73
+ "<style scoped>\n",
74
+ " .dataframe tbody tr th:only-of-type {\n",
75
+ " vertical-align: middle;\n",
76
+ " }\n",
77
+ "\n",
78
+ " .dataframe tbody tr th {\n",
79
+ " vertical-align: top;\n",
80
+ " }\n",
81
+ "\n",
82
+ " .dataframe thead th {\n",
83
+ " text-align: right;\n",
84
+ " }\n",
85
+ "</style>\n",
86
+ "<table border=\"1\" class=\"dataframe\">\n",
87
+ " <thead>\n",
88
+ " <tr style=\"text-align: right;\">\n",
89
+ " <th></th>\n",
90
+ " <th>RegionID</th>\n",
91
+ " <th>SizeRank</th>\n",
92
+ " <th>RegionName</th>\n",
93
+ " <th>RegionType</th>\n",
94
+ " <th>StateName</th>\n",
95
+ " <th>Home Type</th>\n",
96
+ " <th>Date</th>\n",
97
+ " <th>Median Sale to List Ratio</th>\n",
98
+ " <th>Median Sale Price</th>\n",
99
+ " <th>Median Sale Price (Smoothed) (Seasonally Adjusted)</th>\n",
100
+ " <th>Median Sale Price (Smoothed)</th>\n",
101
+ " <th>% Sold Below List (Smoothed)</th>\n",
102
+ " <th>Median Sale to List Ratio (Smoothed)</th>\n",
103
+ " <th>% Sold Above List</th>\n",
104
+ " <th>Mean Sale to List Ratio (Smoothed)</th>\n",
105
+ " <th>Mean Sale to List Ratio</th>\n",
106
+ " <th>% Sold Below List</th>\n",
107
+ " <th>% Sold Above List (Smoothed)</th>\n",
108
+ " </tr>\n",
109
+ " </thead>\n",
110
+ " <tbody>\n",
111
+ " <tr>\n",
112
+ " <th>0</th>\n",
113
+ " <td>102001</td>\n",
114
+ " <td>0</td>\n",
115
+ " <td>United States</td>\n",
116
+ " <td>country</td>\n",
117
+ " <td>NaN</td>\n",
118
+ " <td>SFR</td>\n",
119
+ " <td>2008-02-02</td>\n",
120
+ " <td>NaN</td>\n",
121
+ " <td>172000.0</td>\n",
122
+ " <td>NaN</td>\n",
123
+ " <td>NaN</td>\n",
124
+ " <td>NaN</td>\n",
125
+ " <td>NaN</td>\n",
126
+ " <td>NaN</td>\n",
127
+ " <td>NaN</td>\n",
128
+ " <td>NaN</td>\n",
129
+ " <td>NaN</td>\n",
130
+ " <td>NaN</td>\n",
131
+ " </tr>\n",
132
+ " <tr>\n",
133
+ " <th>1</th>\n",
134
+ " <td>102001</td>\n",
135
+ " <td>0</td>\n",
136
+ " <td>United States</td>\n",
137
+ " <td>country</td>\n",
138
+ " <td>NaN</td>\n",
139
+ " <td>SFR</td>\n",
140
+ " <td>2008-02-09</td>\n",
141
+ " <td>NaN</td>\n",
142
+ " <td>165400.0</td>\n",
143
+ " <td>NaN</td>\n",
144
+ " <td>NaN</td>\n",
145
+ " <td>NaN</td>\n",
146
+ " <td>NaN</td>\n",
147
+ " <td>NaN</td>\n",
148
+ " <td>NaN</td>\n",
149
+ " <td>NaN</td>\n",
150
+ " <td>NaN</td>\n",
151
+ " <td>NaN</td>\n",
152
+ " </tr>\n",
153
+ " <tr>\n",
154
+ " <th>2</th>\n",
155
+ " <td>102001</td>\n",
156
+ " <td>0</td>\n",
157
+ " <td>United States</td>\n",
158
+ " <td>country</td>\n",
159
+ " <td>NaN</td>\n",
160
+ " <td>SFR</td>\n",
161
+ " <td>2008-02-16</td>\n",
162
+ " <td>NaN</td>\n",
163
+ " <td>168000.0</td>\n",
164
+ " <td>NaN</td>\n",
165
+ " <td>NaN</td>\n",
166
+ " <td>NaN</td>\n",
167
+ " <td>NaN</td>\n",
168
+ " <td>NaN</td>\n",
169
+ " <td>NaN</td>\n",
170
+ " <td>NaN</td>\n",
171
+ " <td>NaN</td>\n",
172
+ " <td>NaN</td>\n",
173
+ " </tr>\n",
174
+ " <tr>\n",
175
+ " <th>3</th>\n",
176
+ " <td>102001</td>\n",
177
+ " <td>0</td>\n",
178
+ " <td>United States</td>\n",
179
+ " <td>country</td>\n",
180
+ " <td>NaN</td>\n",
181
+ " <td>SFR</td>\n",
182
+ " <td>2008-02-23</td>\n",
183
+ " <td>NaN</td>\n",
184
+ " <td>167600.0</td>\n",
185
+ " <td>NaN</td>\n",
186
+ " <td>167600.0</td>\n",
187
+ " <td>NaN</td>\n",
188
+ " <td>NaN</td>\n",
189
+ " <td>NaN</td>\n",
190
+ " <td>NaN</td>\n",
191
+ " <td>NaN</td>\n",
192
+ " <td>NaN</td>\n",
193
+ " <td>NaN</td>\n",
194
+ " </tr>\n",
195
+ " <tr>\n",
196
+ " <th>4</th>\n",
197
+ " <td>102001</td>\n",
198
+ " <td>0</td>\n",
199
+ " <td>United States</td>\n",
200
+ " <td>country</td>\n",
201
+ " <td>NaN</td>\n",
202
+ " <td>SFR</td>\n",
203
+ " <td>2008-03-01</td>\n",
204
+ " <td>NaN</td>\n",
205
+ " <td>168100.0</td>\n",
206
+ " <td>NaN</td>\n",
207
+ " <td>168100.0</td>\n",
208
+ " <td>NaN</td>\n",
209
+ " <td>NaN</td>\n",
210
+ " <td>NaN</td>\n",
211
+ " <td>NaN</td>\n",
212
+ " <td>NaN</td>\n",
213
+ " <td>NaN</td>\n",
214
+ " <td>NaN</td>\n",
215
+ " </tr>\n",
216
+ " <tr>\n",
217
+ " <th>...</th>\n",
218
+ " <td>...</td>\n",
219
+ " <td>...</td>\n",
220
+ " <td>...</td>\n",
221
+ " <td>...</td>\n",
222
+ " <td>...</td>\n",
223
+ " <td>...</td>\n",
224
+ " <td>...</td>\n",
225
+ " <td>...</td>\n",
226
+ " <td>...</td>\n",
227
+ " <td>...</td>\n",
228
+ " <td>...</td>\n",
229
+ " <td>...</td>\n",
230
+ " <td>...</td>\n",
231
+ " <td>...</td>\n",
232
+ " <td>...</td>\n",
233
+ " <td>...</td>\n",
234
+ " <td>...</td>\n",
235
+ " <td>...</td>\n",
236
+ " </tr>\n",
237
+ " <tr>\n",
238
+ " <th>255019</th>\n",
239
+ " <td>845160</td>\n",
240
+ " <td>198</td>\n",
241
+ " <td>Prescott Valley, AZ</td>\n",
242
+ " <td>msa</td>\n",
243
+ " <td>AZ</td>\n",
244
+ " <td>all homes</td>\n",
245
+ " <td>2023-11-11</td>\n",
246
+ " <td>0.985132</td>\n",
247
+ " <td>515000.0</td>\n",
248
+ " <td>480020.0</td>\n",
249
+ " <td>480020.0</td>\n",
250
+ " <td>0.651221</td>\n",
251
+ " <td>0.982460</td>\n",
252
+ " <td>0.080000</td>\n",
253
+ " <td>0.978546</td>\n",
254
+ " <td>0.983288</td>\n",
255
+ " <td>0.680000</td>\n",
256
+ " <td>0.119711</td>\n",
257
+ " </tr>\n",
258
+ " <tr>\n",
259
+ " <th>255020</th>\n",
260
+ " <td>845160</td>\n",
261
+ " <td>198</td>\n",
262
+ " <td>Prescott Valley, AZ</td>\n",
263
+ " <td>msa</td>\n",
264
+ " <td>AZ</td>\n",
265
+ " <td>all homes</td>\n",
266
+ " <td>2023-11-18</td>\n",
267
+ " <td>0.972559</td>\n",
268
+ " <td>510000.0</td>\n",
269
+ " <td>476901.0</td>\n",
270
+ " <td>476901.0</td>\n",
271
+ " <td>0.659583</td>\n",
272
+ " <td>0.980362</td>\n",
273
+ " <td>0.142857</td>\n",
274
+ " <td>0.972912</td>\n",
275
+ " <td>0.958341</td>\n",
276
+ " <td>0.625000</td>\n",
277
+ " <td>0.120214</td>\n",
278
+ " </tr>\n",
279
+ " <tr>\n",
280
+ " <th>255021</th>\n",
281
+ " <td>845160</td>\n",
282
+ " <td>198</td>\n",
283
+ " <td>Prescott Valley, AZ</td>\n",
284
+ " <td>msa</td>\n",
285
+ " <td>AZ</td>\n",
286
+ " <td>all homes</td>\n",
287
+ " <td>2023-11-25</td>\n",
288
+ " <td>0.979644</td>\n",
289
+ " <td>484500.0</td>\n",
290
+ " <td>496540.0</td>\n",
291
+ " <td>496540.0</td>\n",
292
+ " <td>0.669387</td>\n",
293
+ " <td>0.979179</td>\n",
294
+ " <td>0.088235</td>\n",
295
+ " <td>0.971177</td>\n",
296
+ " <td>0.973797</td>\n",
297
+ " <td>0.705882</td>\n",
298
+ " <td>0.107185</td>\n",
299
+ " </tr>\n",
300
+ " <tr>\n",
301
+ " <th>255022</th>\n",
302
+ " <td>845160</td>\n",
303
+ " <td>198</td>\n",
304
+ " <td>Prescott Valley, AZ</td>\n",
305
+ " <td>msa</td>\n",
306
+ " <td>AZ</td>\n",
307
+ " <td>all homes</td>\n",
308
+ " <td>2023-12-02</td>\n",
309
+ " <td>0.978261</td>\n",
310
+ " <td>538000.0</td>\n",
311
+ " <td>510491.0</td>\n",
312
+ " <td>510491.0</td>\n",
313
+ " <td>0.678777</td>\n",
314
+ " <td>0.978899</td>\n",
315
+ " <td>0.126761</td>\n",
316
+ " <td>0.970576</td>\n",
317
+ " <td>0.966876</td>\n",
318
+ " <td>0.704225</td>\n",
319
+ " <td>0.109463</td>\n",
320
+ " </tr>\n",
321
+ " <tr>\n",
322
+ " <th>255023</th>\n",
323
+ " <td>845160</td>\n",
324
+ " <td>198</td>\n",
325
+ " <td>Prescott Valley, AZ</td>\n",
326
+ " <td>msa</td>\n",
327
+ " <td>AZ</td>\n",
328
+ " <td>all homes</td>\n",
329
+ " <td>2023-12-09</td>\n",
330
+ " <td>0.981498</td>\n",
331
+ " <td>485000.0</td>\n",
332
+ " <td>503423.0</td>\n",
333
+ " <td>503423.0</td>\n",
334
+ " <td>0.658777</td>\n",
335
+ " <td>0.977990</td>\n",
336
+ " <td>0.100000</td>\n",
337
+ " <td>0.970073</td>\n",
338
+ " <td>0.981278</td>\n",
339
+ " <td>0.600000</td>\n",
340
+ " <td>0.114463</td>\n",
341
+ " </tr>\n",
342
+ " </tbody>\n",
343
+ "</table>\n",
344
+ "<p>255024 rows Γ— 18 columns</p>\n",
345
+ "</div>"
346
+ ],
347
+ "text/plain": [
348
+ " RegionID SizeRank RegionName RegionType StateName \\\n",
349
+ "0 102001 0 United States country NaN \n",
350
+ "1 102001 0 United States country NaN \n",
351
+ "2 102001 0 United States country NaN \n",
352
+ "3 102001 0 United States country NaN \n",
353
+ "4 102001 0 United States country NaN \n",
354
+ "... ... ... ... ... ... \n",
355
+ "255019 845160 198 Prescott Valley, AZ msa AZ \n",
356
+ "255020 845160 198 Prescott Valley, AZ msa AZ \n",
357
+ "255021 845160 198 Prescott Valley, AZ msa AZ \n",
358
+ "255022 845160 198 Prescott Valley, AZ msa AZ \n",
359
+ "255023 845160 198 Prescott Valley, AZ msa AZ \n",
360
+ "\n",
361
+ " Home Type Date Median Sale to List Ratio Median Sale Price \\\n",
362
+ "0 SFR 2008-02-02 NaN 172000.0 \n",
363
+ "1 SFR 2008-02-09 NaN 165400.0 \n",
364
+ "2 SFR 2008-02-16 NaN 168000.0 \n",
365
+ "3 SFR 2008-02-23 NaN 167600.0 \n",
366
+ "4 SFR 2008-03-01 NaN 168100.0 \n",
367
+ "... ... ... ... ... \n",
368
+ "255019 all homes 2023-11-11 0.985132 515000.0 \n",
369
+ "255020 all homes 2023-11-18 0.972559 510000.0 \n",
370
+ "255021 all homes 2023-11-25 0.979644 484500.0 \n",
371
+ "255022 all homes 2023-12-02 0.978261 538000.0 \n",
372
+ "255023 all homes 2023-12-09 0.981498 485000.0 \n",
373
+ "\n",
374
+ " Median Sale Price (Smoothed) (Seasonally Adjusted) \\\n",
375
+ "0 NaN \n",
376
+ "1 NaN \n",
377
+ "2 NaN \n",
378
+ "3 NaN \n",
379
+ "4 NaN \n",
380
+ "... ... \n",
381
+ "255019 480020.0 \n",
382
+ "255020 476901.0 \n",
383
+ "255021 496540.0 \n",
384
+ "255022 510491.0 \n",
385
+ "255023 503423.0 \n",
386
+ "\n",
387
+ " Median Sale Price (Smoothed) % Sold Below List (Smoothed) \\\n",
388
+ "0 NaN NaN \n",
389
+ "1 NaN NaN \n",
390
+ "2 NaN NaN \n",
391
+ "3 167600.0 NaN \n",
392
+ "4 168100.0 NaN \n",
393
+ "... ... ... \n",
394
+ "255019 480020.0 0.651221 \n",
395
+ "255020 476901.0 0.659583 \n",
396
+ "255021 496540.0 0.669387 \n",
397
+ "255022 510491.0 0.678777 \n",
398
+ "255023 503423.0 0.658777 \n",
399
+ "\n",
400
+ " Median Sale to List Ratio (Smoothed) % Sold Above List \\\n",
401
+ "0 NaN NaN \n",
402
+ "1 NaN NaN \n",
403
+ "2 NaN NaN \n",
404
+ "3 NaN NaN \n",
405
+ "4 NaN NaN \n",
406
+ "... ... ... \n",
407
+ "255019 0.982460 0.080000 \n",
408
+ "255020 0.980362 0.142857 \n",
409
+ "255021 0.979179 0.088235 \n",
410
+ "255022 0.978899 0.126761 \n",
411
+ "255023 0.977990 0.100000 \n",
412
+ "\n",
413
+ " Mean Sale to List Ratio (Smoothed) Mean Sale to List Ratio \\\n",
414
+ "0 NaN NaN \n",
415
+ "1 NaN NaN \n",
416
+ "2 NaN NaN \n",
417
+ "3 NaN NaN \n",
418
+ "4 NaN NaN \n",
419
+ "... ... ... \n",
420
+ "255019 0.978546 0.983288 \n",
421
+ "255020 0.972912 0.958341 \n",
422
+ "255021 0.971177 0.973797 \n",
423
+ "255022 0.970576 0.966876 \n",
424
+ "255023 0.970073 0.981278 \n",
425
+ "\n",
426
+ " % Sold Below List % Sold Above List (Smoothed) \n",
427
+ "0 NaN NaN \n",
428
+ "1 NaN NaN \n",
429
+ "2 NaN NaN \n",
430
+ "3 NaN NaN \n",
431
+ "4 NaN NaN \n",
432
+ "... ... ... \n",
433
+ "255019 0.680000 0.119711 \n",
434
+ "255020 0.625000 0.120214 \n",
435
+ "255021 0.705882 0.107185 \n",
436
+ "255022 0.704225 0.109463 \n",
437
+ "255023 0.600000 0.114463 \n",
438
+ "\n",
439
+ "[255024 rows x 18 columns]"
440
+ ]
441
+ },
442
+ "execution_count": 3,
443
+ "metadata": {},
444
+ "output_type": "execute_result"
445
+ }
446
+ ],
447
+ "source": [
448
+ "data_frames = []\n",
449
+ "\n",
450
+ "exclude_columns = [\n",
451
+ " \"RegionID\",\n",
452
+ " \"SizeRank\",\n",
453
+ " \"RegionName\",\n",
454
+ " \"RegionType\",\n",
455
+ " \"StateName\",\n",
456
+ " \"Home Type\",\n",
457
+ "]\n",
458
+ "\n",
459
+ "slug_column_mappings = {\n",
460
+ " \"_median_sale_to_list_\": \"Median Sale to List Ratio\",\n",
461
+ " \"_mean_sale_to_list_\": \"Mean Sale to List Ratio\",\n",
462
+ " \"_median_sale_price_\": \"Median Sale Price\",\n",
463
+ " \"_pct_sold_above_list_\": \"% Sold Above List\",\n",
464
+ " \"_pct_sold_below_list_\": \"% Sold Below List\",\n",
465
+ " \"_sales_count_now_\": \"Nowcast\",\n",
466
+ "}\n",
467
+ "\n",
468
+ "data_dir_path = get_data_path_for_config(CONFIG_NAME)\n",
469
+ "\n",
470
+ "for filename in os.listdir(data_dir_path):\n",
471
+ " if filename.endswith(\".csv\"):\n",
472
+ " print(\"processing \" + filename)\n",
473
+ " # ignore monthly data for now since it is redundant\n",
474
+ " if \"month\" in filename:\n",
475
+ " continue\n",
476
+ "\n",
477
+ " cur_df = pd.read_csv(os.path.join(data_dir_path, filename))\n",
478
+ "\n",
479
+ " cur_df = set_home_type(cur_df, filename)\n",
480
+ "\n",
481
+ " data_frames = handle_slug_column_mappings(\n",
482
+ " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
483
+ " )\n",
484
+ "\n",
485
+ "\n",
486
+ "combined_df = get_combined_df(\n",
487
+ " data_frames,\n",
488
+ " [\n",
489
+ " \"RegionID\",\n",
490
+ " \"SizeRank\",\n",
491
+ " \"RegionName\",\n",
492
+ " \"RegionType\",\n",
493
+ " \"StateName\",\n",
494
+ " \"Home Type\",\n",
495
+ " \"Date\",\n",
496
+ " ],\n",
497
+ ")\n",
498
+ "\n",
499
+ "combined_df"
500
+ ]
501
+ },
502
+ {
503
+ "cell_type": "code",
504
+ "execution_count": 4,
505
+ "metadata": {},
506
+ "outputs": [
507
+ {
508
+ "data": {
509
+ "text/html": [
510
+ "<div>\n",
511
+ "<style scoped>\n",
512
+ " .dataframe tbody tr th:only-of-type {\n",
513
+ " vertical-align: middle;\n",
514
+ " }\n",
515
+ "\n",
516
+ " .dataframe tbody tr th {\n",
517
+ " vertical-align: top;\n",
518
+ " }\n",
519
+ "\n",
520
+ " .dataframe thead th {\n",
521
+ " text-align: right;\n",
522
+ " }\n",
523
+ "</style>\n",
524
+ "<table border=\"1\" class=\"dataframe\">\n",
525
+ " <thead>\n",
526
+ " <tr style=\"text-align: right;\">\n",
527
+ " <th></th>\n",
528
+ " <th>Region ID</th>\n",
529
+ " <th>Size Rank</th>\n",
530
+ " <th>Region</th>\n",
531
+ " <th>Region Type</th>\n",
532
+ " <th>State</th>\n",
533
+ " <th>Home Type</th>\n",
534
+ " <th>Date</th>\n",
535
+ " <th>Median Sale to List Ratio</th>\n",
536
+ " <th>Median Sale Price</th>\n",
537
+ " <th>Median Sale Price (Smoothed) (Seasonally Adjusted)</th>\n",
538
+ " <th>Median Sale Price (Smoothed)</th>\n",
539
+ " <th>% Sold Below List (Smoothed)</th>\n",
540
+ " <th>Median Sale to List Ratio (Smoothed)</th>\n",
541
+ " <th>% Sold Above List</th>\n",
542
+ " <th>Mean Sale to List Ratio (Smoothed)</th>\n",
543
+ " <th>Mean Sale to List Ratio</th>\n",
544
+ " <th>% Sold Below List</th>\n",
545
+ " <th>% Sold Above List (Smoothed)</th>\n",
546
+ " </tr>\n",
547
+ " </thead>\n",
548
+ " <tbody>\n",
549
+ " <tr>\n",
550
+ " <th>0</th>\n",
551
+ " <td>102001</td>\n",
552
+ " <td>0</td>\n",
553
+ " <td>United States</td>\n",
554
+ " <td>country</td>\n",
555
+ " <td>NaN</td>\n",
556
+ " <td>SFR</td>\n",
557
+ " <td>2008-02-02</td>\n",
558
+ " <td>NaN</td>\n",
559
+ " <td>172000.0</td>\n",
560
+ " <td>NaN</td>\n",
561
+ " <td>NaN</td>\n",
562
+ " <td>NaN</td>\n",
563
+ " <td>NaN</td>\n",
564
+ " <td>NaN</td>\n",
565
+ " <td>NaN</td>\n",
566
+ " <td>NaN</td>\n",
567
+ " <td>NaN</td>\n",
568
+ " <td>NaN</td>\n",
569
+ " </tr>\n",
570
+ " <tr>\n",
571
+ " <th>1</th>\n",
572
+ " <td>102001</td>\n",
573
+ " <td>0</td>\n",
574
+ " <td>United States</td>\n",
575
+ " <td>country</td>\n",
576
+ " <td>NaN</td>\n",
577
+ " <td>SFR</td>\n",
578
+ " <td>2008-02-09</td>\n",
579
+ " <td>NaN</td>\n",
580
+ " <td>165400.0</td>\n",
581
+ " <td>NaN</td>\n",
582
+ " <td>NaN</td>\n",
583
+ " <td>NaN</td>\n",
584
+ " <td>NaN</td>\n",
585
+ " <td>NaN</td>\n",
586
+ " <td>NaN</td>\n",
587
+ " <td>NaN</td>\n",
588
+ " <td>NaN</td>\n",
589
+ " <td>NaN</td>\n",
590
+ " </tr>\n",
591
+ " <tr>\n",
592
+ " <th>2</th>\n",
593
+ " <td>102001</td>\n",
594
+ " <td>0</td>\n",
595
+ " <td>United States</td>\n",
596
+ " <td>country</td>\n",
597
+ " <td>NaN</td>\n",
598
+ " <td>SFR</td>\n",
599
+ " <td>2008-02-16</td>\n",
600
+ " <td>NaN</td>\n",
601
+ " <td>168000.0</td>\n",
602
+ " <td>NaN</td>\n",
603
+ " <td>NaN</td>\n",
604
+ " <td>NaN</td>\n",
605
+ " <td>NaN</td>\n",
606
+ " <td>NaN</td>\n",
607
+ " <td>NaN</td>\n",
608
+ " <td>NaN</td>\n",
609
+ " <td>NaN</td>\n",
610
+ " <td>NaN</td>\n",
611
+ " </tr>\n",
612
+ " <tr>\n",
613
+ " <th>3</th>\n",
614
+ " <td>102001</td>\n",
615
+ " <td>0</td>\n",
616
+ " <td>United States</td>\n",
617
+ " <td>country</td>\n",
618
+ " <td>NaN</td>\n",
619
+ " <td>SFR</td>\n",
620
+ " <td>2008-02-23</td>\n",
621
+ " <td>NaN</td>\n",
622
+ " <td>167600.0</td>\n",
623
+ " <td>NaN</td>\n",
624
+ " <td>167600.0</td>\n",
625
+ " <td>NaN</td>\n",
626
+ " <td>NaN</td>\n",
627
+ " <td>NaN</td>\n",
628
+ " <td>NaN</td>\n",
629
+ " <td>NaN</td>\n",
630
+ " <td>NaN</td>\n",
631
+ " <td>NaN</td>\n",
632
+ " </tr>\n",
633
+ " <tr>\n",
634
+ " <th>4</th>\n",
635
+ " <td>102001</td>\n",
636
+ " <td>0</td>\n",
637
+ " <td>United States</td>\n",
638
+ " <td>country</td>\n",
639
+ " <td>NaN</td>\n",
640
+ " <td>SFR</td>\n",
641
+ " <td>2008-03-01</td>\n",
642
+ " <td>NaN</td>\n",
643
+ " <td>168100.0</td>\n",
644
+ " <td>NaN</td>\n",
645
+ " <td>168100.0</td>\n",
646
+ " <td>NaN</td>\n",
647
+ " <td>NaN</td>\n",
648
+ " <td>NaN</td>\n",
649
+ " <td>NaN</td>\n",
650
+ " <td>NaN</td>\n",
651
+ " <td>NaN</td>\n",
652
+ " <td>NaN</td>\n",
653
+ " </tr>\n",
654
+ " <tr>\n",
655
+ " <th>...</th>\n",
656
+ " <td>...</td>\n",
657
+ " <td>...</td>\n",
658
+ " <td>...</td>\n",
659
+ " <td>...</td>\n",
660
+ " <td>...</td>\n",
661
+ " <td>...</td>\n",
662
+ " <td>...</td>\n",
663
+ " <td>...</td>\n",
664
+ " <td>...</td>\n",
665
+ " <td>...</td>\n",
666
+ " <td>...</td>\n",
667
+ " <td>...</td>\n",
668
+ " <td>...</td>\n",
669
+ " <td>...</td>\n",
670
+ " <td>...</td>\n",
671
+ " <td>...</td>\n",
672
+ " <td>...</td>\n",
673
+ " <td>...</td>\n",
674
+ " </tr>\n",
675
+ " <tr>\n",
676
+ " <th>255019</th>\n",
677
+ " <td>845160</td>\n",
678
+ " <td>198</td>\n",
679
+ " <td>Prescott Valley, AZ</td>\n",
680
+ " <td>msa</td>\n",
681
+ " <td>AZ</td>\n",
682
+ " <td>all homes</td>\n",
683
+ " <td>2023-11-11</td>\n",
684
+ " <td>0.985132</td>\n",
685
+ " <td>515000.0</td>\n",
686
+ " <td>480020.0</td>\n",
687
+ " <td>480020.0</td>\n",
688
+ " <td>0.651221</td>\n",
689
+ " <td>0.982460</td>\n",
690
+ " <td>0.080000</td>\n",
691
+ " <td>0.978546</td>\n",
692
+ " <td>0.983288</td>\n",
693
+ " <td>0.680000</td>\n",
694
+ " <td>0.119711</td>\n",
695
+ " </tr>\n",
696
+ " <tr>\n",
697
+ " <th>255020</th>\n",
698
+ " <td>845160</td>\n",
699
+ " <td>198</td>\n",
700
+ " <td>Prescott Valley, AZ</td>\n",
701
+ " <td>msa</td>\n",
702
+ " <td>AZ</td>\n",
703
+ " <td>all homes</td>\n",
704
+ " <td>2023-11-18</td>\n",
705
+ " <td>0.972559</td>\n",
706
+ " <td>510000.0</td>\n",
707
+ " <td>476901.0</td>\n",
708
+ " <td>476901.0</td>\n",
709
+ " <td>0.659583</td>\n",
710
+ " <td>0.980362</td>\n",
711
+ " <td>0.142857</td>\n",
712
+ " <td>0.972912</td>\n",
713
+ " <td>0.958341</td>\n",
714
+ " <td>0.625000</td>\n",
715
+ " <td>0.120214</td>\n",
716
+ " </tr>\n",
717
+ " <tr>\n",
718
+ " <th>255021</th>\n",
719
+ " <td>845160</td>\n",
720
+ " <td>198</td>\n",
721
+ " <td>Prescott Valley, AZ</td>\n",
722
+ " <td>msa</td>\n",
723
+ " <td>AZ</td>\n",
724
+ " <td>all homes</td>\n",
725
+ " <td>2023-11-25</td>\n",
726
+ " <td>0.979644</td>\n",
727
+ " <td>484500.0</td>\n",
728
+ " <td>496540.0</td>\n",
729
+ " <td>496540.0</td>\n",
730
+ " <td>0.669387</td>\n",
731
+ " <td>0.979179</td>\n",
732
+ " <td>0.088235</td>\n",
733
+ " <td>0.971177</td>\n",
734
+ " <td>0.973797</td>\n",
735
+ " <td>0.705882</td>\n",
736
+ " <td>0.107185</td>\n",
737
+ " </tr>\n",
738
+ " <tr>\n",
739
+ " <th>255022</th>\n",
740
+ " <td>845160</td>\n",
741
+ " <td>198</td>\n",
742
+ " <td>Prescott Valley, AZ</td>\n",
743
+ " <td>msa</td>\n",
744
+ " <td>AZ</td>\n",
745
+ " <td>all homes</td>\n",
746
+ " <td>2023-12-02</td>\n",
747
+ " <td>0.978261</td>\n",
748
+ " <td>538000.0</td>\n",
749
+ " <td>510491.0</td>\n",
750
+ " <td>510491.0</td>\n",
751
+ " <td>0.678777</td>\n",
752
+ " <td>0.978899</td>\n",
753
+ " <td>0.126761</td>\n",
754
+ " <td>0.970576</td>\n",
755
+ " <td>0.966876</td>\n",
756
+ " <td>0.704225</td>\n",
757
+ " <td>0.109463</td>\n",
758
+ " </tr>\n",
759
+ " <tr>\n",
760
+ " <th>255023</th>\n",
761
+ " <td>845160</td>\n",
762
+ " <td>198</td>\n",
763
+ " <td>Prescott Valley, AZ</td>\n",
764
+ " <td>msa</td>\n",
765
+ " <td>AZ</td>\n",
766
+ " <td>all homes</td>\n",
767
+ " <td>2023-12-09</td>\n",
768
+ " <td>0.981498</td>\n",
769
+ " <td>485000.0</td>\n",
770
+ " <td>503423.0</td>\n",
771
+ " <td>503423.0</td>\n",
772
+ " <td>0.658777</td>\n",
773
+ " <td>0.977990</td>\n",
774
+ " <td>0.100000</td>\n",
775
+ " <td>0.970073</td>\n",
776
+ " <td>0.981278</td>\n",
777
+ " <td>0.600000</td>\n",
778
+ " <td>0.114463</td>\n",
779
+ " </tr>\n",
780
+ " </tbody>\n",
781
+ "</table>\n",
782
+ "<p>255024 rows Γ— 18 columns</p>\n",
783
+ "</div>"
784
+ ],
785
+ "text/plain": [
786
+ " Region ID Size Rank Region Region Type State \\\n",
787
+ "0 102001 0 United States country NaN \n",
788
+ "1 102001 0 United States country NaN \n",
789
+ "2 102001 0 United States country NaN \n",
790
+ "3 102001 0 United States country NaN \n",
791
+ "4 102001 0 United States country NaN \n",
792
+ "... ... ... ... ... ... \n",
793
+ "255019 845160 198 Prescott Valley, AZ msa AZ \n",
794
+ "255020 845160 198 Prescott Valley, AZ msa AZ \n",
795
+ "255021 845160 198 Prescott Valley, AZ msa AZ \n",
796
+ "255022 845160 198 Prescott Valley, AZ msa AZ \n",
797
+ "255023 845160 198 Prescott Valley, AZ msa AZ \n",
798
+ "\n",
799
+ " Home Type Date Median Sale to List Ratio Median Sale Price \\\n",
800
+ "0 SFR 2008-02-02 NaN 172000.0 \n",
801
+ "1 SFR 2008-02-09 NaN 165400.0 \n",
802
+ "2 SFR 2008-02-16 NaN 168000.0 \n",
803
+ "3 SFR 2008-02-23 NaN 167600.0 \n",
804
+ "4 SFR 2008-03-01 NaN 168100.0 \n",
805
+ "... ... ... ... ... \n",
806
+ "255019 all homes 2023-11-11 0.985132 515000.0 \n",
807
+ "255020 all homes 2023-11-18 0.972559 510000.0 \n",
808
+ "255021 all homes 2023-11-25 0.979644 484500.0 \n",
809
+ "255022 all homes 2023-12-02 0.978261 538000.0 \n",
810
+ "255023 all homes 2023-12-09 0.981498 485000.0 \n",
811
+ "\n",
812
+ " Median Sale Price (Smoothed) (Seasonally Adjusted) \\\n",
813
+ "0 NaN \n",
814
+ "1 NaN \n",
815
+ "2 NaN \n",
816
+ "3 NaN \n",
817
+ "4 NaN \n",
818
+ "... ... \n",
819
+ "255019 480020.0 \n",
820
+ "255020 476901.0 \n",
821
+ "255021 496540.0 \n",
822
+ "255022 510491.0 \n",
823
+ "255023 503423.0 \n",
824
+ "\n",
825
+ " Median Sale Price (Smoothed) % Sold Below List (Smoothed) \\\n",
826
+ "0 NaN NaN \n",
827
+ "1 NaN NaN \n",
828
+ "2 NaN NaN \n",
829
+ "3 167600.0 NaN \n",
830
+ "4 168100.0 NaN \n",
831
+ "... ... ... \n",
832
+ "255019 480020.0 0.651221 \n",
833
+ "255020 476901.0 0.659583 \n",
834
+ "255021 496540.0 0.669387 \n",
835
+ "255022 510491.0 0.678777 \n",
836
+ "255023 503423.0 0.658777 \n",
837
+ "\n",
838
+ " Median Sale to List Ratio (Smoothed) % Sold Above List \\\n",
839
+ "0 NaN NaN \n",
840
+ "1 NaN NaN \n",
841
+ "2 NaN NaN \n",
842
+ "3 NaN NaN \n",
843
+ "4 NaN NaN \n",
844
+ "... ... ... \n",
845
+ "255019 0.982460 0.080000 \n",
846
+ "255020 0.980362 0.142857 \n",
847
+ "255021 0.979179 0.088235 \n",
848
+ "255022 0.978899 0.126761 \n",
849
+ "255023 0.977990 0.100000 \n",
850
+ "\n",
851
+ " Mean Sale to List Ratio (Smoothed) Mean Sale to List Ratio \\\n",
852
+ "0 NaN NaN \n",
853
+ "1 NaN NaN \n",
854
+ "2 NaN NaN \n",
855
+ "3 NaN NaN \n",
856
+ "4 NaN NaN \n",
857
+ "... ... ... \n",
858
+ "255019 0.978546 0.983288 \n",
859
+ "255020 0.972912 0.958341 \n",
860
+ "255021 0.971177 0.973797 \n",
861
+ "255022 0.970576 0.966876 \n",
862
+ "255023 0.970073 0.981278 \n",
863
+ "\n",
864
+ " % Sold Below List % Sold Above List (Smoothed) \n",
865
+ "0 NaN NaN \n",
866
+ "1 NaN NaN \n",
867
+ "2 NaN NaN \n",
868
+ "3 NaN NaN \n",
869
+ "4 NaN NaN \n",
870
+ "... ... ... \n",
871
+ "255019 0.680000 0.119711 \n",
872
+ "255020 0.625000 0.120214 \n",
873
+ "255021 0.705882 0.107185 \n",
874
+ "255022 0.704225 0.109463 \n",
875
+ "255023 0.600000 0.114463 \n",
876
+ "\n",
877
+ "[255024 rows x 18 columns]"
878
+ ]
879
+ },
880
+ "execution_count": 4,
881
+ "metadata": {},
882
+ "output_type": "execute_result"
883
+ }
884
+ ],
885
+ "source": [
886
+ "# Adjust column names\n",
887
+ "final_df = combined_df.rename(\n",
888
+ " columns={\n",
889
+ " \"RegionID\": \"Region ID\",\n",
890
+ " \"SizeRank\": \"Size Rank\",\n",
891
+ " \"RegionName\": \"Region\",\n",
892
+ " \"RegionType\": \"Region Type\",\n",
893
+ " \"StateName\": \"State\",\n",
894
+ " }\n",
895
+ ")\n",
896
+ "\n",
897
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"])\n",
898
+ "final_df.sort_values(by=[\"Region ID\", \"Home Type\", \"Date\"])"
899
+ ]
900
+ },
901
+ {
902
+ "cell_type": "code",
903
+ "execution_count": 5,
904
+ "metadata": {},
905
+ "outputs": [
906
+ {
907
+ "data": {
908
+ "text/html": [
909
+ "<div>\n",
910
+ "<style scoped>\n",
911
+ " .dataframe tbody tr th:only-of-type {\n",
912
+ " vertical-align: middle;\n",
913
+ " }\n",
914
+ "\n",
915
+ " .dataframe tbody tr th {\n",
916
+ " vertical-align: top;\n",
917
+ " }\n",
918
+ "\n",
919
+ " .dataframe thead th {\n",
920
+ " text-align: right;\n",
921
+ " }\n",
922
+ "</style>\n",
923
+ "<table border=\"1\" class=\"dataframe\">\n",
924
+ " <thead>\n",
925
+ " <tr style=\"text-align: right;\">\n",
926
+ " <th></th>\n",
927
+ " <th>Region ID</th>\n",
928
+ " <th>Size Rank</th>\n",
929
+ " <th>Region</th>\n",
930
+ " <th>Region Type</th>\n",
931
+ " <th>State</th>\n",
932
+ " <th>Home Type</th>\n",
933
+ " <th>Date</th>\n",
934
+ " <th>Median Sale to List Ratio</th>\n",
935
+ " <th>Median Sale Price</th>\n",
936
+ " <th>Median Sale Price (Smoothed) (Seasonally Adjusted)</th>\n",
937
+ " <th>Median Sale Price (Smoothed)</th>\n",
938
+ " <th>% Sold Below List (Smoothed)</th>\n",
939
+ " <th>Median Sale to List Ratio (Smoothed)</th>\n",
940
+ " <th>% Sold Above List</th>\n",
941
+ " <th>Mean Sale to List Ratio (Smoothed)</th>\n",
942
+ " <th>Mean Sale to List Ratio</th>\n",
943
+ " <th>% Sold Below List</th>\n",
944
+ " <th>% Sold Above List (Smoothed)</th>\n",
945
+ " </tr>\n",
946
+ " </thead>\n",
947
+ " <tbody>\n",
948
+ " <tr>\n",
949
+ " <th>0</th>\n",
950
+ " <td>102001</td>\n",
951
+ " <td>0</td>\n",
952
+ " <td>United States</td>\n",
953
+ " <td>country</td>\n",
954
+ " <td>NaN</td>\n",
955
+ " <td>SFR</td>\n",
956
+ " <td>2008-02-02</td>\n",
957
+ " <td>NaN</td>\n",
958
+ " <td>172000.0</td>\n",
959
+ " <td>NaN</td>\n",
960
+ " <td>NaN</td>\n",
961
+ " <td>NaN</td>\n",
962
+ " <td>NaN</td>\n",
963
+ " <td>NaN</td>\n",
964
+ " <td>NaN</td>\n",
965
+ " <td>NaN</td>\n",
966
+ " <td>NaN</td>\n",
967
+ " <td>NaN</td>\n",
968
+ " </tr>\n",
969
+ " <tr>\n",
970
+ " <th>1</th>\n",
971
+ " <td>102001</td>\n",
972
+ " <td>0</td>\n",
973
+ " <td>United States</td>\n",
974
+ " <td>country</td>\n",
975
+ " <td>NaN</td>\n",
976
+ " <td>SFR</td>\n",
977
+ " <td>2008-02-09</td>\n",
978
+ " <td>NaN</td>\n",
979
+ " <td>165400.0</td>\n",
980
+ " <td>NaN</td>\n",
981
+ " <td>NaN</td>\n",
982
+ " <td>NaN</td>\n",
983
+ " <td>NaN</td>\n",
984
+ " <td>NaN</td>\n",
985
+ " <td>NaN</td>\n",
986
+ " <td>NaN</td>\n",
987
+ " <td>NaN</td>\n",
988
+ " <td>NaN</td>\n",
989
+ " </tr>\n",
990
+ " <tr>\n",
991
+ " <th>2</th>\n",
992
+ " <td>102001</td>\n",
993
+ " <td>0</td>\n",
994
+ " <td>United States</td>\n",
995
+ " <td>country</td>\n",
996
+ " <td>NaN</td>\n",
997
+ " <td>SFR</td>\n",
998
+ " <td>2008-02-16</td>\n",
999
+ " <td>NaN</td>\n",
1000
+ " <td>168000.0</td>\n",
1001
+ " <td>NaN</td>\n",
1002
+ " <td>NaN</td>\n",
1003
+ " <td>NaN</td>\n",
1004
+ " <td>NaN</td>\n",
1005
+ " <td>NaN</td>\n",
1006
+ " <td>NaN</td>\n",
1007
+ " <td>NaN</td>\n",
1008
+ " <td>NaN</td>\n",
1009
+ " <td>NaN</td>\n",
1010
+ " </tr>\n",
1011
+ " <tr>\n",
1012
+ " <th>3</th>\n",
1013
+ " <td>102001</td>\n",
1014
+ " <td>0</td>\n",
1015
+ " <td>United States</td>\n",
1016
+ " <td>country</td>\n",
1017
+ " <td>NaN</td>\n",
1018
+ " <td>SFR</td>\n",
1019
+ " <td>2008-02-23</td>\n",
1020
+ " <td>NaN</td>\n",
1021
+ " <td>167600.0</td>\n",
1022
+ " <td>NaN</td>\n",
1023
+ " <td>167600.0</td>\n",
1024
+ " <td>NaN</td>\n",
1025
+ " <td>NaN</td>\n",
1026
+ " <td>NaN</td>\n",
1027
+ " <td>NaN</td>\n",
1028
+ " <td>NaN</td>\n",
1029
+ " <td>NaN</td>\n",
1030
+ " <td>NaN</td>\n",
1031
+ " </tr>\n",
1032
+ " <tr>\n",
1033
+ " <th>4</th>\n",
1034
+ " <td>102001</td>\n",
1035
+ " <td>0</td>\n",
1036
+ " <td>United States</td>\n",
1037
+ " <td>country</td>\n",
1038
+ " <td>NaN</td>\n",
1039
+ " <td>SFR</td>\n",
1040
+ " <td>2008-03-01</td>\n",
1041
+ " <td>NaN</td>\n",
1042
+ " <td>168100.0</td>\n",
1043
+ " <td>NaN</td>\n",
1044
+ " <td>168100.0</td>\n",
1045
+ " <td>NaN</td>\n",
1046
+ " <td>NaN</td>\n",
1047
+ " <td>NaN</td>\n",
1048
+ " <td>NaN</td>\n",
1049
+ " <td>NaN</td>\n",
1050
+ " <td>NaN</td>\n",
1051
+ " <td>NaN</td>\n",
1052
+ " </tr>\n",
1053
+ " <tr>\n",
1054
+ " <th>...</th>\n",
1055
+ " <td>...</td>\n",
1056
+ " <td>...</td>\n",
1057
+ " <td>...</td>\n",
1058
+ " <td>...</td>\n",
1059
+ " <td>...</td>\n",
1060
+ " <td>...</td>\n",
1061
+ " <td>...</td>\n",
1062
+ " <td>...</td>\n",
1063
+ " <td>...</td>\n",
1064
+ " <td>...</td>\n",
1065
+ " <td>...</td>\n",
1066
+ " <td>...</td>\n",
1067
+ " <td>...</td>\n",
1068
+ " <td>...</td>\n",
1069
+ " <td>...</td>\n",
1070
+ " <td>...</td>\n",
1071
+ " <td>...</td>\n",
1072
+ " <td>...</td>\n",
1073
+ " </tr>\n",
1074
+ " <tr>\n",
1075
+ " <th>255019</th>\n",
1076
+ " <td>845160</td>\n",
1077
+ " <td>198</td>\n",
1078
+ " <td>Prescott Valley, AZ</td>\n",
1079
+ " <td>msa</td>\n",
1080
+ " <td>AZ</td>\n",
1081
+ " <td>all homes</td>\n",
1082
+ " <td>2023-11-11</td>\n",
1083
+ " <td>0.985132</td>\n",
1084
+ " <td>515000.0</td>\n",
1085
+ " <td>480020.0</td>\n",
1086
+ " <td>480020.0</td>\n",
1087
+ " <td>0.651221</td>\n",
1088
+ " <td>0.982460</td>\n",
1089
+ " <td>0.080000</td>\n",
1090
+ " <td>0.978546</td>\n",
1091
+ " <td>0.983288</td>\n",
1092
+ " <td>0.680000</td>\n",
1093
+ " <td>0.119711</td>\n",
1094
+ " </tr>\n",
1095
+ " <tr>\n",
1096
+ " <th>255020</th>\n",
1097
+ " <td>845160</td>\n",
1098
+ " <td>198</td>\n",
1099
+ " <td>Prescott Valley, AZ</td>\n",
1100
+ " <td>msa</td>\n",
1101
+ " <td>AZ</td>\n",
1102
+ " <td>all homes</td>\n",
1103
+ " <td>2023-11-18</td>\n",
1104
+ " <td>0.972559</td>\n",
1105
+ " <td>510000.0</td>\n",
1106
+ " <td>476901.0</td>\n",
1107
+ " <td>476901.0</td>\n",
1108
+ " <td>0.659583</td>\n",
1109
+ " <td>0.980362</td>\n",
1110
+ " <td>0.142857</td>\n",
1111
+ " <td>0.972912</td>\n",
1112
+ " <td>0.958341</td>\n",
1113
+ " <td>0.625000</td>\n",
1114
+ " <td>0.120214</td>\n",
1115
+ " </tr>\n",
1116
+ " <tr>\n",
1117
+ " <th>255021</th>\n",
1118
+ " <td>845160</td>\n",
1119
+ " <td>198</td>\n",
1120
+ " <td>Prescott Valley, AZ</td>\n",
1121
+ " <td>msa</td>\n",
1122
+ " <td>AZ</td>\n",
1123
+ " <td>all homes</td>\n",
1124
+ " <td>2023-11-25</td>\n",
1125
+ " <td>0.979644</td>\n",
1126
+ " <td>484500.0</td>\n",
1127
+ " <td>496540.0</td>\n",
1128
+ " <td>496540.0</td>\n",
1129
+ " <td>0.669387</td>\n",
1130
+ " <td>0.979179</td>\n",
1131
+ " <td>0.088235</td>\n",
1132
+ " <td>0.971177</td>\n",
1133
+ " <td>0.973797</td>\n",
1134
+ " <td>0.705882</td>\n",
1135
+ " <td>0.107185</td>\n",
1136
+ " </tr>\n",
1137
+ " <tr>\n",
1138
+ " <th>255022</th>\n",
1139
+ " <td>845160</td>\n",
1140
+ " <td>198</td>\n",
1141
+ " <td>Prescott Valley, AZ</td>\n",
1142
+ " <td>msa</td>\n",
1143
+ " <td>AZ</td>\n",
1144
+ " <td>all homes</td>\n",
1145
+ " <td>2023-12-02</td>\n",
1146
+ " <td>0.978261</td>\n",
1147
+ " <td>538000.0</td>\n",
1148
+ " <td>510491.0</td>\n",
1149
+ " <td>510491.0</td>\n",
1150
+ " <td>0.678777</td>\n",
1151
+ " <td>0.978899</td>\n",
1152
+ " <td>0.126761</td>\n",
1153
+ " <td>0.970576</td>\n",
1154
+ " <td>0.966876</td>\n",
1155
+ " <td>0.704225</td>\n",
1156
+ " <td>0.109463</td>\n",
1157
+ " </tr>\n",
1158
+ " <tr>\n",
1159
+ " <th>255023</th>\n",
1160
+ " <td>845160</td>\n",
1161
+ " <td>198</td>\n",
1162
+ " <td>Prescott Valley, AZ</td>\n",
1163
+ " <td>msa</td>\n",
1164
+ " <td>AZ</td>\n",
1165
+ " <td>all homes</td>\n",
1166
+ " <td>2023-12-09</td>\n",
1167
+ " <td>0.981498</td>\n",
1168
+ " <td>485000.0</td>\n",
1169
+ " <td>503423.0</td>\n",
1170
+ " <td>503423.0</td>\n",
1171
+ " <td>0.658777</td>\n",
1172
+ " <td>0.977990</td>\n",
1173
+ " <td>0.100000</td>\n",
1174
+ " <td>0.970073</td>\n",
1175
+ " <td>0.981278</td>\n",
1176
+ " <td>0.600000</td>\n",
1177
+ " <td>0.114463</td>\n",
1178
+ " </tr>\n",
1179
+ " </tbody>\n",
1180
+ "</table>\n",
1181
+ "<p>255024 rows Γ— 18 columns</p>\n",
1182
+ "</div>"
1183
+ ],
1184
+ "text/plain": [
1185
+ " Region ID Size Rank Region Region Type State \\\n",
1186
+ "0 102001 0 United States country NaN \n",
1187
+ "1 102001 0 United States country NaN \n",
1188
+ "2 102001 0 United States country NaN \n",
1189
+ "3 102001 0 United States country NaN \n",
1190
+ "4 102001 0 United States country NaN \n",
1191
+ "... ... ... ... ... ... \n",
1192
+ "255019 845160 198 Prescott Valley, AZ msa AZ \n",
1193
+ "255020 845160 198 Prescott Valley, AZ msa AZ \n",
1194
+ "255021 845160 198 Prescott Valley, AZ msa AZ \n",
1195
+ "255022 845160 198 Prescott Valley, AZ msa AZ \n",
1196
+ "255023 845160 198 Prescott Valley, AZ msa AZ \n",
1197
+ "\n",
1198
+ " Home Type Date Median Sale to List Ratio Median Sale Price \\\n",
1199
+ "0 SFR 2008-02-02 NaN 172000.0 \n",
1200
+ "1 SFR 2008-02-09 NaN 165400.0 \n",
1201
+ "2 SFR 2008-02-16 NaN 168000.0 \n",
1202
+ "3 SFR 2008-02-23 NaN 167600.0 \n",
1203
+ "4 SFR 2008-03-01 NaN 168100.0 \n",
1204
+ "... ... ... ... ... \n",
1205
+ "255019 all homes 2023-11-11 0.985132 515000.0 \n",
1206
+ "255020 all homes 2023-11-18 0.972559 510000.0 \n",
1207
+ "255021 all homes 2023-11-25 0.979644 484500.0 \n",
1208
+ "255022 all homes 2023-12-02 0.978261 538000.0 \n",
1209
+ "255023 all homes 2023-12-09 0.981498 485000.0 \n",
1210
+ "\n",
1211
+ " Median Sale Price (Smoothed) (Seasonally Adjusted) \\\n",
1212
+ "0 NaN \n",
1213
+ "1 NaN \n",
1214
+ "2 NaN \n",
1215
+ "3 NaN \n",
1216
+ "4 NaN \n",
1217
+ "... ... \n",
1218
+ "255019 480020.0 \n",
1219
+ "255020 476901.0 \n",
1220
+ "255021 496540.0 \n",
1221
+ "255022 510491.0 \n",
1222
+ "255023 503423.0 \n",
1223
+ "\n",
1224
+ " Median Sale Price (Smoothed) % Sold Below List (Smoothed) \\\n",
1225
+ "0 NaN NaN \n",
1226
+ "1 NaN NaN \n",
1227
+ "2 NaN NaN \n",
1228
+ "3 167600.0 NaN \n",
1229
+ "4 168100.0 NaN \n",
1230
+ "... ... ... \n",
1231
+ "255019 480020.0 0.651221 \n",
1232
+ "255020 476901.0 0.659583 \n",
1233
+ "255021 496540.0 0.669387 \n",
1234
+ "255022 510491.0 0.678777 \n",
1235
+ "255023 503423.0 0.658777 \n",
1236
+ "\n",
1237
+ " Median Sale to List Ratio (Smoothed) % Sold Above List \\\n",
1238
+ "0 NaN NaN \n",
1239
+ "1 NaN NaN \n",
1240
+ "2 NaN NaN \n",
1241
+ "3 NaN NaN \n",
1242
+ "4 NaN NaN \n",
1243
+ "... ... ... \n",
1244
+ "255019 0.982460 0.080000 \n",
1245
+ "255020 0.980362 0.142857 \n",
1246
+ "255021 0.979179 0.088235 \n",
1247
+ "255022 0.978899 0.126761 \n",
1248
+ "255023 0.977990 0.100000 \n",
1249
+ "\n",
1250
+ " Mean Sale to List Ratio (Smoothed) Mean Sale to List Ratio \\\n",
1251
+ "0 NaN NaN \n",
1252
+ "1 NaN NaN \n",
1253
+ "2 NaN NaN \n",
1254
+ "3 NaN NaN \n",
1255
+ "4 NaN NaN \n",
1256
+ "... ... ... \n",
1257
+ "255019 0.978546 0.983288 \n",
1258
+ "255020 0.972912 0.958341 \n",
1259
+ "255021 0.971177 0.973797 \n",
1260
+ "255022 0.970576 0.966876 \n",
1261
+ "255023 0.970073 0.981278 \n",
1262
+ "\n",
1263
+ " % Sold Below List % Sold Above List (Smoothed) \n",
1264
+ "0 NaN NaN \n",
1265
+ "1 NaN NaN \n",
1266
+ "2 NaN NaN \n",
1267
+ "3 NaN NaN \n",
1268
+ "4 NaN NaN \n",
1269
+ "... ... ... \n",
1270
+ "255019 0.680000 0.119711 \n",
1271
+ "255020 0.625000 0.120214 \n",
1272
+ "255021 0.705882 0.107185 \n",
1273
+ "255022 0.704225 0.109463 \n",
1274
+ "255023 0.600000 0.114463 \n",
1275
+ "\n",
1276
+ "[255024 rows x 18 columns]"
1277
+ ]
1278
+ },
1279
+ "execution_count": 5,
1280
+ "metadata": {},
1281
+ "output_type": "execute_result"
1282
+ }
1283
+ ],
1284
+ "source": [
1285
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
1286
+ "\n",
1287
+ "final_df"
1288
+ ]
1289
+ },
1290
+ {
1291
+ "cell_type": "code",
1292
+ "execution_count": 6,
1293
+ "metadata": {},
1294
+ "outputs": [],
1295
+ "source": [
1296
+ "save_final_df_as_jsonl(CONFIG_NAME, final_df)"
1297
+ ]
1298
+ }
1299
+ ],
1300
+ "metadata": {
1301
+ "kernelspec": {
1302
+ "display_name": "Python 3",
1303
+ "language": "python",
1304
+ "name": "python3"
1305
+ },
1306
+ "language_info": {
1307
+ "codemirror_mode": {
1308
+ "name": "ipython",
1309
+ "version": 3
1310
+ },
1311
+ "file_extension": ".py",
1312
+ "mimetype": "text/x-python",
1313
+ "name": "python",
1314
+ "nbconvert_exporter": "python",
1315
+ "pygments_lexer": "ipython3",
1316
+ "version": "3.12.2"
1317
+ }
1318
+ },
1319
+ "nbformat": 4,
1320
+ "nbformat_minor": 2
1321
+ }
processors/sales.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[1]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import (
11
+ get_data_path_for_config,
12
+ get_combined_df,
13
+ save_final_df_as_jsonl,
14
+ handle_slug_column_mappings,
15
+ set_home_type,
16
+ )
17
+
18
+
19
+ # In[2]:
20
+
21
+
22
+ CONFIG_NAME = "sales"
23
+
24
+
25
+ # In[3]:
26
+
27
+
28
+ data_frames = []
29
+
30
+ exclude_columns = [
31
+ "RegionID",
32
+ "SizeRank",
33
+ "RegionName",
34
+ "RegionType",
35
+ "StateName",
36
+ "Home Type",
37
+ ]
38
+
39
+ slug_column_mappings = {
40
+ "_median_sale_to_list_": "Median Sale to List Ratio",
41
+ "_mean_sale_to_list_": "Mean Sale to List Ratio",
42
+ "_median_sale_price_": "Median Sale Price",
43
+ "_pct_sold_above_list_": "% Sold Above List",
44
+ "_pct_sold_below_list_": "% Sold Below List",
45
+ "_sales_count_now_": "Nowcast",
46
+ }
47
+
48
+ data_dir_path = get_data_path_for_config(CONFIG_NAME)
49
+
50
+ for filename in os.listdir(data_dir_path):
51
+ if filename.endswith(".csv"):
52
+ print("processing " + filename)
53
+ # ignore monthly data for now since it is redundant
54
+ if "month" in filename:
55
+ continue
56
+
57
+ cur_df = pd.read_csv(os.path.join(data_dir_path, filename))
58
+
59
+ cur_df = set_home_type(cur_df, filename)
60
+
61
+ data_frames = handle_slug_column_mappings(
62
+ data_frames, slug_column_mappings, exclude_columns, filename, cur_df
63
+ )
64
+
65
+
66
+ combined_df = get_combined_df(
67
+ data_frames,
68
+ [
69
+ "RegionID",
70
+ "SizeRank",
71
+ "RegionName",
72
+ "RegionType",
73
+ "StateName",
74
+ "Home Type",
75
+ "Date",
76
+ ],
77
+ )
78
+
79
+ combined_df
80
+
81
+
82
+ # In[4]:
83
+
84
+
85
+ # Adjust column names
86
+ final_df = combined_df.rename(
87
+ columns={
88
+ "RegionID": "Region ID",
89
+ "SizeRank": "Size Rank",
90
+ "RegionName": "Region",
91
+ "RegionType": "Region Type",
92
+ "StateName": "State",
93
+ }
94
+ )
95
+
96
+ final_df["Date"] = pd.to_datetime(final_df["Date"])
97
+ final_df.sort_values(by=["Region ID", "Home Type", "Date"])
98
+
99
+
100
+ # In[5]:
101
+
102
+
103
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
104
+
105
+ final_df
106
+
107
+
108
+ # In[6]:
109
+
110
+
111
+ save_final_df_as_jsonl(CONFIG_NAME, final_df)
112
+
test-stuff/dataset_infos.json ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "gem_data_split": {
3
+ "description": " SQuAD2.0 combines the 100,000 questions in SQuAD1.1 with over 50,000 unanswerable questions written adversarially by crowdworkers\n to look similar to answerable ones. To do well on SQuAD2.0, systems must not only answer questions when possible, but\n also determine when no answer is supported by the paragraph and abstain from answering.\n",
4
+ "citation": "@article{2016arXiv160605250R,\n author = {{Rajpurkar}, Pranav and {Zhang}, Jian and {Lopyrev},\n Konstantin and {Liang}, Percy},\n title = \"{SQuAD: 100,000+ Questions for Machine Comprehension of Text}\",\n journal = {arXiv e-prints},\n year = 2016,\n eid = {arXiv:1606.05250},\n pages = {arXiv:1606.05250},\narchivePrefix = {arXiv},\n eprint = {1606.05250},\n}\n",
5
+ "homepage": "https://rajpurkar.github.io/SQuAD-explorer/",
6
+ "license": "CC BY-SA 4.0",
7
+ "features": {
8
+ "gem_id": {
9
+ "dtype": "string",
10
+ "id": null,
11
+ "_type": "Value"
12
+ },
13
+ "id": {
14
+ "dtype": "string",
15
+ "id": null,
16
+ "_type": "Value"
17
+ },
18
+ "title": {
19
+ "dtype": "string",
20
+ "id": null,
21
+ "_type": "Value"
22
+ },
23
+ "context": {
24
+ "dtype": "string",
25
+ "id": null,
26
+ "_type": "Value"
27
+ },
28
+ "question": {
29
+ "dtype": "string",
30
+ "id": null,
31
+ "_type": "Value"
32
+ },
33
+ "target": {
34
+ "dtype": "string",
35
+ "id": null,
36
+ "_type": "Value"
37
+ },
38
+ "references": [
39
+ {
40
+ "dtype": "string",
41
+ "id": null,
42
+ "_type": "Value"
43
+ }
44
+ ],
45
+ "answers": {
46
+ "feature": {
47
+ "text": {
48
+ "dtype": "string",
49
+ "id": null,
50
+ "_type": "Value"
51
+ },
52
+ "answer_start": {
53
+ "dtype": "int32",
54
+ "id": null,
55
+ "_type": "Value"
56
+ }
57
+ },
58
+ "length": -1,
59
+ "id": null,
60
+ "_type": "Sequence"
61
+ }
62
+ },
63
+ "post_processed": null,
64
+ "supervised_keys": null,
65
+ "task_templates": null,
66
+ "builder_name": "zillow",
67
+ "config_name": "sales",
68
+ "version": {
69
+ "version_str": "1.0.0",
70
+ "description": null,
71
+ "major": 1,
72
+ "minor": 0,
73
+ "patch": 0
74
+ },
75
+ "splits": {
76
+ "test": {
77
+ "name": "test",
78
+ "num_bytes": 14716686,
79
+ "num_examples": 13922,
80
+ "dataset_name": "squad_v2"
81
+ }
82
+ },
83
+ "download_checksums": {
84
+ "./test.json": {
85
+ "num_bytes": 15489062,
86
+ "checksum": "0491481fa9f87b853325955a723f4f016a1b2c7c36102e0f1a6d719cef2f2fe7"
87
+ }
88
+ },
89
+ "download_size": 159207580,
90
+ "post_processing_size": null,
91
+ "dataset_size": 150788555,
92
+ "size_in_bytes": 309996135
93
+ }
94
+ }
test-stuff/test.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0491481fa9f87b853325955a723f4f016a1b2c7c36102e0f1a6d719cef2f2fe7
3
+ size 15489062
tester.ipynb ADDED
@@ -0,0 +1,199 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 4,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "# !pip install datasets\n",
10
+ "\n",
11
+ "from datasets import load_dataset"
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": 7,
17
+ "metadata": {},
18
+ "outputs": [
19
+ {
20
+ "name": "stdout",
21
+ "output_type": "stream",
22
+ "text": [
23
+ "home_values_forecasts\n",
24
+ "new_constructions\n",
25
+ "for_sale_listings\n"
26
+ ]
27
+ },
28
+ {
29
+ "name": "stderr",
30
+ "output_type": "stream",
31
+ "text": [
32
+ "Downloading data: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 215M/215M [00:05<00:00, 37.3MB/s] \n",
33
+ "Generating train split: 693661 examples [00:20, 34052.02 examples/s]\n"
34
+ ]
35
+ },
36
+ {
37
+ "name": "stdout",
38
+ "output_type": "stream",
39
+ "text": [
40
+ "rentals\n"
41
+ ]
42
+ },
43
+ {
44
+ "name": "stderr",
45
+ "output_type": "stream",
46
+ "text": [
47
+ "Downloading data: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 413M/413M [00:12<00:00, 34.2MB/s] \n",
48
+ "Generating train split: 1258740 examples [00:28, 44715.39 examples/s]\n"
49
+ ]
50
+ },
51
+ {
52
+ "name": "stdout",
53
+ "output_type": "stream",
54
+ "text": [
55
+ "sales\n"
56
+ ]
57
+ },
58
+ {
59
+ "name": "stderr",
60
+ "output_type": "stream",
61
+ "text": [
62
+ "Downloading data: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 280M/280M [00:06<00:00, 41.1MB/s] \n",
63
+ "Generating train split: 504608 examples [00:19, 25569.29 examples/s]\n"
64
+ ]
65
+ },
66
+ {
67
+ "name": "stdout",
68
+ "output_type": "stream",
69
+ "text": [
70
+ "home_values\n"
71
+ ]
72
+ },
73
+ {
74
+ "name": "stderr",
75
+ "output_type": "stream",
76
+ "text": [
77
+ "Downloading data: 100%|β–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆβ–ˆ| 47.3M/47.3M [00:01<00:00, 29.7MB/s]\n",
78
+ "Generating train split: 117912 examples [00:03, 35540.83 examples/s]\n"
79
+ ]
80
+ },
81
+ {
82
+ "name": "stdout",
83
+ "output_type": "stream",
84
+ "text": [
85
+ "days_on_market\n"
86
+ ]
87
+ },
88
+ {
89
+ "name": "stderr",
90
+ "output_type": "stream",
91
+ "text": [
92
+ "Generating train split: 586714 examples [00:16, 34768.33 examples/s]\n"
93
+ ]
94
+ }
95
+ ],
96
+ "source": [
97
+ "configs = [\n",
98
+ " \"home_values_forecasts\",\n",
99
+ " \"new_construction\",\n",
100
+ " \"for_sale_listings\",\n",
101
+ " \"rentals\",\n",
102
+ " \"sales\",\n",
103
+ " \"home_values\",\n",
104
+ " \"days_on_market\",\n",
105
+ "]\n",
106
+ "for config in configs:\n",
107
+ " print(config)\n",
108
+ " dataset = load_dataset(\"misikoff/zillow\", config, trust_remote_code=True)"
109
+ ]
110
+ },
111
+ {
112
+ "cell_type": "code",
113
+ "execution_count": 7,
114
+ "metadata": {},
115
+ "outputs": [
116
+ {
117
+ "data": {
118
+ "text/plain": [
119
+ "{'Region ID': '102001',\n",
120
+ " 'Size Rank': 0,\n",
121
+ " 'Region': 'United States',\n",
122
+ " 'Region Type': 'country',\n",
123
+ " 'State': None,\n",
124
+ " 'Home Type': 'SFR',\n",
125
+ " 'Date': '2015-01-31',\n",
126
+ " 'Rent (Smoothed)': 1251.1195068359375,\n",
127
+ " 'Rent (Smoothed) (Seasonally Adjusted)': 1253.3807373046875}"
128
+ ]
129
+ },
130
+ "execution_count": 7,
131
+ "metadata": {},
132
+ "output_type": "execute_result"
133
+ }
134
+ ],
135
+ "source": [
136
+ "next(iter((dataset[\"train\"])))"
137
+ ]
138
+ },
139
+ {
140
+ "cell_type": "code",
141
+ "execution_count": 8,
142
+ "metadata": {},
143
+ "outputs": [],
144
+ "source": [
145
+ "gen = iter((dataset[\"train\"]))"
146
+ ]
147
+ },
148
+ {
149
+ "cell_type": "code",
150
+ "execution_count": 37,
151
+ "metadata": {},
152
+ "outputs": [
153
+ {
154
+ "data": {
155
+ "text/plain": [
156
+ "{'Region ID': '102001',\n",
157
+ " 'Size Rank': 0,\n",
158
+ " 'Region': 'United States',\n",
159
+ " 'Region Type': 'country',\n",
160
+ " 'State': None,\n",
161
+ " 'Home Type': 'condo/co-op only',\n",
162
+ " 'Date': '2018-03-31',\n",
163
+ " 'Sale Price': 386700.0,\n",
164
+ " 'Sale Price per Sqft': 238.31776428222656,\n",
165
+ " 'Count': 4267}"
166
+ ]
167
+ },
168
+ "execution_count": 37,
169
+ "metadata": {},
170
+ "output_type": "execute_result"
171
+ }
172
+ ],
173
+ "source": [
174
+ "next(gen)"
175
+ ]
176
+ }
177
+ ],
178
+ "metadata": {
179
+ "kernelspec": {
180
+ "display_name": "sta663",
181
+ "language": "python",
182
+ "name": "python3"
183
+ },
184
+ "language_info": {
185
+ "codemirror_mode": {
186
+ "name": "ipython",
187
+ "version": 3
188
+ },
189
+ "file_extension": ".py",
190
+ "mimetype": "text/x-python",
191
+ "name": "python",
192
+ "nbconvert_exporter": "python",
193
+ "pygments_lexer": "ipython3",
194
+ "version": "3.12.2"
195
+ }
196
+ },
197
+ "nbformat": 4,
198
+ "nbformat_minor": 2
199
+ }