misikoff commited on
Commit
f2ba714
1 Parent(s): 1316a3d

fix: update python files to use set homes

Browse files
processors/days_on_market.ipynb CHANGED
@@ -2,7 +2,7 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 1,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
@@ -13,12 +13,13 @@
13
  " get_combined_df,\n",
14
  " save_final_df_as_jsonl,\n",
15
  " handle_slug_column_mappings,\n",
 
16
  ")"
17
  ]
18
  },
19
  {
20
  "cell_type": "code",
21
- "execution_count": 2,
22
  "metadata": {},
23
  "outputs": [],
24
  "source": [
@@ -31,247 +32,65 @@
31
  },
32
  {
33
  "cell_type": "code",
34
- "execution_count": 3,
35
  "metadata": {},
36
  "outputs": [
37
  {
38
- "data": {
39
- "text/html": [
40
- "<div>\n",
41
- "<style scoped>\n",
42
- " .dataframe tbody tr th:only-of-type {\n",
43
- " vertical-align: middle;\n",
44
- " }\n",
45
- "\n",
46
- " .dataframe tbody tr th {\n",
47
- " vertical-align: top;\n",
48
- " }\n",
49
- "\n",
50
- " .dataframe thead th {\n",
51
- " text-align: right;\n",
52
- " }\n",
53
- "</style>\n",
54
- "<table border=\"1\" class=\"dataframe\">\n",
55
- " <thead>\n",
56
- " <tr style=\"text-align: right;\">\n",
57
- " <th></th>\n",
58
- " <th>RegionID</th>\n",
59
- " <th>SizeRank</th>\n",
60
- " <th>RegionName</th>\n",
61
- " <th>RegionType</th>\n",
62
- " <th>StateName</th>\n",
63
- " <th>Home Type</th>\n",
64
- " <th>Date</th>\n",
65
- " <th>Mean Listings Price Cut Amount (Smoothed)</th>\n",
66
- " <th>Percent Listings Price Cut</th>\n",
67
- " <th>Mean Listings Price Cut Amount</th>\n",
68
- " <th>Percent Listings Price Cut (Smoothed)</th>\n",
69
- " <th>Median Days on Pending (Smoothed)</th>\n",
70
- " <th>Median Days on Pending</th>\n",
71
- " </tr>\n",
72
- " </thead>\n",
73
- " <tbody>\n",
74
- " <tr>\n",
75
- " <th>0</th>\n",
76
- " <td>102001</td>\n",
77
- " <td>0</td>\n",
78
- " <td>United States</td>\n",
79
- " <td>country</td>\n",
80
- " <td>NaN</td>\n",
81
- " <td>SFR</td>\n",
82
- " <td>2018-01-06</td>\n",
83
- " <td>NaN</td>\n",
84
- " <td>NaN</td>\n",
85
- " <td>13508.368375</td>\n",
86
- " <td>NaN</td>\n",
87
- " <td>NaN</td>\n",
88
- " <td>NaN</td>\n",
89
- " </tr>\n",
90
- " <tr>\n",
91
- " <th>1</th>\n",
92
- " <td>102001</td>\n",
93
- " <td>0</td>\n",
94
- " <td>United States</td>\n",
95
- " <td>country</td>\n",
96
- " <td>NaN</td>\n",
97
- " <td>SFR</td>\n",
98
- " <td>2018-01-13</td>\n",
99
- " <td>NaN</td>\n",
100
- " <td>0.049042</td>\n",
101
- " <td>14114.788383</td>\n",
102
- " <td>NaN</td>\n",
103
- " <td>NaN</td>\n",
104
- " <td>NaN</td>\n",
105
- " </tr>\n",
106
- " <tr>\n",
107
- " <th>2</th>\n",
108
- " <td>102001</td>\n",
109
- " <td>0</td>\n",
110
- " <td>United States</td>\n",
111
- " <td>country</td>\n",
112
- " <td>NaN</td>\n",
113
- " <td>SFR</td>\n",
114
- " <td>2018-01-20</td>\n",
115
- " <td>NaN</td>\n",
116
- " <td>0.044740</td>\n",
117
- " <td>14326.128956</td>\n",
118
- " <td>NaN</td>\n",
119
- " <td>NaN</td>\n",
120
- " <td>NaN</td>\n",
121
- " </tr>\n",
122
- " <tr>\n",
123
- " <th>3</th>\n",
124
- " <td>102001</td>\n",
125
- " <td>0</td>\n",
126
- " <td>United States</td>\n",
127
- " <td>country</td>\n",
128
- " <td>NaN</td>\n",
129
- " <td>SFR</td>\n",
130
- " <td>2018-01-27</td>\n",
131
- " <td>13998.585612</td>\n",
132
- " <td>0.047930</td>\n",
133
- " <td>13998.585612</td>\n",
134
- " <td>NaN</td>\n",
135
- " <td>NaN</td>\n",
136
- " <td>NaN</td>\n",
137
- " </tr>\n",
138
- " <tr>\n",
139
- " <th>4</th>\n",
140
- " <td>102001</td>\n",
141
- " <td>0</td>\n",
142
- " <td>United States</td>\n",
143
- " <td>country</td>\n",
144
- " <td>NaN</td>\n",
145
- " <td>SFR</td>\n",
146
- " <td>2018-02-03</td>\n",
147
- " <td>14120.035549</td>\n",
148
- " <td>0.047622</td>\n",
149
- " <td>14120.035549</td>\n",
150
- " <td>0.047622</td>\n",
151
- " <td>NaN</td>\n",
152
- " <td>NaN</td>\n",
153
- " </tr>\n",
154
- " <tr>\n",
155
- " <th>...</th>\n",
156
- " <td>...</td>\n",
157
- " <td>...</td>\n",
158
- " <td>...</td>\n",
159
- " <td>...</td>\n",
160
- " <td>...</td>\n",
161
- " <td>...</td>\n",
162
- " <td>...</td>\n",
163
- " <td>...</td>\n",
164
- " <td>...</td>\n",
165
- " <td>...</td>\n",
166
- " <td>...</td>\n",
167
- " <td>...</td>\n",
168
- " <td>...</td>\n",
169
- " </tr>\n",
170
- " <tr>\n",
171
- " <th>586709</th>\n",
172
- " <td>845172</td>\n",
173
- " <td>769</td>\n",
174
- " <td>Winfield, KS</td>\n",
175
- " <td>msa</td>\n",
176
- " <td>KS</td>\n",
177
- " <td>all homes (SFR + Condo)</td>\n",
178
- " <td>2024-01-06</td>\n",
179
- " <td>NaN</td>\n",
180
- " <td>0.094017</td>\n",
181
- " <td>NaN</td>\n",
182
- " <td>0.037378</td>\n",
183
- " <td>NaN</td>\n",
184
- " <td>NaN</td>\n",
185
- " </tr>\n",
186
- " <tr>\n",
187
- " <th>586710</th>\n",
188
- " <td>845172</td>\n",
189
- " <td>769</td>\n",
190
- " <td>Winfield, KS</td>\n",
191
- " <td>msa</td>\n",
192
- " <td>KS</td>\n",
193
- " <td>all homes (SFR + Condo)</td>\n",
194
- " <td>2024-01-13</td>\n",
195
- " <td>NaN</td>\n",
196
- " <td>0.070175</td>\n",
197
- " <td>NaN</td>\n",
198
- " <td>0.043203</td>\n",
199
- " <td>NaN</td>\n",
200
- " <td>NaN</td>\n",
201
- " </tr>\n",
202
- " <tr>\n",
203
- " <th>586711</th>\n",
204
- " <td>845172</td>\n",
205
- " <td>769</td>\n",
206
- " <td>Winfield, KS</td>\n",
207
- " <td>msa</td>\n",
208
- " <td>KS</td>\n",
209
- " <td>all homes (SFR + Condo)</td>\n",
210
- " <td>2024-01-20</td>\n",
211
- " <td>NaN</td>\n",
212
- " <td>0.043478</td>\n",
213
- " <td>NaN</td>\n",
214
- " <td>0.054073</td>\n",
215
- " <td>NaN</td>\n",
216
- " <td>NaN</td>\n",
217
- " </tr>\n",
218
- " <tr>\n",
219
- " <th>586712</th>\n",
220
- " <td>845172</td>\n",
221
- " <td>769</td>\n",
222
- " <td>Winfield, KS</td>\n",
223
- " <td>msa</td>\n",
224
- " <td>KS</td>\n",
225
- " <td>all homes (SFR + Condo)</td>\n",
226
- " <td>2024-01-27</td>\n",
227
- " <td>NaN</td>\n",
228
- " <td>0.036697</td>\n",
229
- " <td>NaN</td>\n",
230
- " <td>0.061092</td>\n",
231
- " <td>NaN</td>\n",
232
- " <td>NaN</td>\n",
233
- " </tr>\n",
234
- " <tr>\n",
235
- " <th>586713</th>\n",
236
- " <td>845172</td>\n",
237
- " <td>769</td>\n",
238
- " <td>Winfield, KS</td>\n",
239
- " <td>msa</td>\n",
240
- " <td>KS</td>\n",
241
- " <td>all homes (SFR + Condo)</td>\n",
242
- " <td>2024-02-03</td>\n",
243
- " <td>NaN</td>\n",
244
- " <td>0.077670</td>\n",
245
- " <td>NaN</td>\n",
246
- " <td>0.057005</td>\n",
247
- " <td>NaN</td>\n",
248
- " <td>NaN</td>\n",
249
- " </tr>\n",
250
- " </tbody>\n",
251
- "</table>\n",
252
- "<p>586714 rows × 13 columns</p>\n",
253
- "</div>"
254
- ],
255
- "text/plain": [
256
- " RegionID ... Median Days on Pending\n",
257
- "0 102001 ... NaN\n",
258
- "1 102001 ... NaN\n",
259
- "2 102001 ... NaN\n",
260
- "3 102001 ... NaN\n",
261
- "4 102001 ... NaN\n",
262
- "... ... ... ...\n",
263
- "586709 845172 ... NaN\n",
264
- "586710 845172 ... NaN\n",
265
- "586711 845172 ... NaN\n",
266
- "586712 845172 ... NaN\n",
267
- "586713 845172 ... NaN\n",
268
- "\n",
269
- "[586714 rows x 13 columns]"
270
- ]
271
- },
272
- "execution_count": 3,
273
- "metadata": {},
274
- "output_type": "execute_result"
275
  }
276
  ],
277
  "source": [
@@ -303,12 +122,8 @@
303
  "\n",
304
  " cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))\n",
305
  "\n",
306
- " if \"_uc_sfrcondo_\" in filename:\n",
307
- " cur_df[\"Home Type\"] = \"all homes (SFR + Condo)\"\n",
308
- " # change column type to string\n",
309
- " cur_df[\"RegionName\"] = cur_df[\"RegionName\"].astype(str)\n",
310
- " elif \"_uc_sfr_\" in filename:\n",
311
- " cur_df[\"Home Type\"] = \"SFR\"\n",
312
  "\n",
313
  " data_frames = handle_slug_column_mappings(\n",
314
  " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
@@ -333,7 +148,7 @@
333
  },
334
  {
335
  "cell_type": "code",
336
- "execution_count": 9,
337
  "metadata": {},
338
  "outputs": [
339
  {
@@ -364,10 +179,10 @@
364
  " <th>State</th>\n",
365
  " <th>Home Type</th>\n",
366
  " <th>Date</th>\n",
367
- " <th>Mean Listings Price Cut Amount (Smoothed)</th>\n",
368
  " <th>Percent Listings Price Cut</th>\n",
369
  " <th>Mean Listings Price Cut Amount</th>\n",
370
  " <th>Percent Listings Price Cut (Smoothed)</th>\n",
 
371
  " <th>Median Days on Pending (Smoothed)</th>\n",
372
  " <th>Median Days on Pending</th>\n",
373
  " </tr>\n",
@@ -383,11 +198,11 @@
383
  " <td>SFR</td>\n",
384
  " <td>2018-01-06</td>\n",
385
  " <td>NaN</td>\n",
386
- " <td>NaN</td>\n",
387
  " <td>13508.368375</td>\n",
388
  " <td>NaN</td>\n",
389
  " <td>NaN</td>\n",
390
  " <td>NaN</td>\n",
 
391
  " </tr>\n",
392
  " <tr>\n",
393
  " <th>1</th>\n",
@@ -398,12 +213,12 @@
398
  " <td>NaN</td>\n",
399
  " <td>SFR</td>\n",
400
  " <td>2018-01-13</td>\n",
401
- " <td>NaN</td>\n",
402
  " <td>0.049042</td>\n",
403
  " <td>14114.788383</td>\n",
404
  " <td>NaN</td>\n",
405
  " <td>NaN</td>\n",
406
  " <td>NaN</td>\n",
 
407
  " </tr>\n",
408
  " <tr>\n",
409
  " <th>2</th>\n",
@@ -414,12 +229,12 @@
414
  " <td>NaN</td>\n",
415
  " <td>SFR</td>\n",
416
  " <td>2018-01-20</td>\n",
417
- " <td>NaN</td>\n",
418
  " <td>0.044740</td>\n",
419
  " <td>14326.128956</td>\n",
420
  " <td>NaN</td>\n",
421
  " <td>NaN</td>\n",
422
  " <td>NaN</td>\n",
 
423
  " </tr>\n",
424
  " <tr>\n",
425
  " <th>3</th>\n",
@@ -430,10 +245,10 @@
430
  " <td>NaN</td>\n",
431
  " <td>SFR</td>\n",
432
  " <td>2018-01-27</td>\n",
433
- " <td>13998.585612</td>\n",
434
  " <td>0.047930</td>\n",
435
  " <td>13998.585612</td>\n",
436
  " <td>NaN</td>\n",
 
437
  " <td>NaN</td>\n",
438
  " <td>NaN</td>\n",
439
  " </tr>\n",
@@ -446,10 +261,10 @@
446
  " <td>NaN</td>\n",
447
  " <td>SFR</td>\n",
448
  " <td>2018-02-03</td>\n",
449
- " <td>14120.035549</td>\n",
450
  " <td>0.047622</td>\n",
451
  " <td>14120.035549</td>\n",
452
  " <td>0.047622</td>\n",
 
453
  " <td>NaN</td>\n",
454
  " <td>NaN</td>\n",
455
  " </tr>\n",
@@ -476,14 +291,14 @@
476
  " <td>Winfield, KS</td>\n",
477
  " <td>msa</td>\n",
478
  " <td>KS</td>\n",
479
- " <td>all homes (SFR + Condo)</td>\n",
480
  " <td>2024-01-06</td>\n",
481
- " <td>NaN</td>\n",
482
  " <td>0.094017</td>\n",
483
  " <td>NaN</td>\n",
484
  " <td>0.037378</td>\n",
485
  " <td>NaN</td>\n",
486
  " <td>NaN</td>\n",
 
487
  " </tr>\n",
488
  " <tr>\n",
489
  " <th>586710</th>\n",
@@ -492,14 +307,14 @@
492
  " <td>Winfield, KS</td>\n",
493
  " <td>msa</td>\n",
494
  " <td>KS</td>\n",
495
- " <td>all homes (SFR + Condo)</td>\n",
496
  " <td>2024-01-13</td>\n",
497
- " <td>NaN</td>\n",
498
  " <td>0.070175</td>\n",
499
  " <td>NaN</td>\n",
500
  " <td>0.043203</td>\n",
501
  " <td>NaN</td>\n",
502
  " <td>NaN</td>\n",
 
503
  " </tr>\n",
504
  " <tr>\n",
505
  " <th>586711</th>\n",
@@ -508,14 +323,14 @@
508
  " <td>Winfield, KS</td>\n",
509
  " <td>msa</td>\n",
510
  " <td>KS</td>\n",
511
- " <td>all homes (SFR + Condo)</td>\n",
512
  " <td>2024-01-20</td>\n",
513
- " <td>NaN</td>\n",
514
  " <td>0.043478</td>\n",
515
  " <td>NaN</td>\n",
516
  " <td>0.054073</td>\n",
517
  " <td>NaN</td>\n",
518
  " <td>NaN</td>\n",
 
519
  " </tr>\n",
520
  " <tr>\n",
521
  " <th>586712</th>\n",
@@ -524,14 +339,14 @@
524
  " <td>Winfield, KS</td>\n",
525
  " <td>msa</td>\n",
526
  " <td>KS</td>\n",
527
- " <td>all homes (SFR + Condo)</td>\n",
528
  " <td>2024-01-27</td>\n",
529
- " <td>NaN</td>\n",
530
  " <td>0.036697</td>\n",
531
  " <td>NaN</td>\n",
532
  " <td>0.061092</td>\n",
533
  " <td>NaN</td>\n",
534
  " <td>NaN</td>\n",
 
535
  " </tr>\n",
536
  " <tr>\n",
537
  " <th>586713</th>\n",
@@ -540,14 +355,14 @@
540
  " <td>Winfield, KS</td>\n",
541
  " <td>msa</td>\n",
542
  " <td>KS</td>\n",
543
- " <td>all homes (SFR + Condo)</td>\n",
544
  " <td>2024-02-03</td>\n",
545
- " <td>NaN</td>\n",
546
  " <td>0.077670</td>\n",
547
  " <td>NaN</td>\n",
548
  " <td>0.057005</td>\n",
549
  " <td>NaN</td>\n",
550
  " <td>NaN</td>\n",
 
551
  " </tr>\n",
552
  " </tbody>\n",
553
  "</table>\n",
@@ -555,57 +370,57 @@
555
  "</div>"
556
  ],
557
  "text/plain": [
558
- " Region ID Size Rank Region Region Type State \\\n",
559
- "0 102001 0 United States country NaN \n",
560
- "1 102001 0 United States country NaN \n",
561
- "2 102001 0 United States country NaN \n",
562
- "3 102001 0 United States country NaN \n",
563
- "4 102001 0 United States country NaN \n",
564
- "... ... ... ... ... ... \n",
565
- "586709 845172 769 Winfield, KS msa KS \n",
566
- "586710 845172 769 Winfield, KS msa KS \n",
567
- "586711 845172 769 Winfield, KS msa KS \n",
568
- "586712 845172 769 Winfield, KS msa KS \n",
569
- "586713 845172 769 Winfield, KS msa KS \n",
570
  "\n",
571
- " Home Type Date \\\n",
572
- "0 SFR 2018-01-06 \n",
573
- "1 SFR 2018-01-13 \n",
574
- "2 SFR 2018-01-20 \n",
575
- "3 SFR 2018-01-27 \n",
576
- "4 SFR 2018-02-03 \n",
577
- "... ... ... \n",
578
- "586709 all homes (SFR + Condo) 2024-01-06 \n",
579
- "586710 all homes (SFR + Condo) 2024-01-13 \n",
580
- "586711 all homes (SFR + Condo) 2024-01-20 \n",
581
- "586712 all homes (SFR + Condo) 2024-01-27 \n",
582
- "586713 all homes (SFR + Condo) 2024-02-03 \n",
583
  "\n",
584
- " Mean Listings Price Cut Amount (Smoothed) Percent Listings Price Cut \\\n",
585
- "0 NaN NaN \n",
586
- "1 NaN 0.049042 \n",
587
- "2 NaN 0.044740 \n",
588
- "3 13998.585612 0.047930 \n",
589
- "4 14120.035549 0.047622 \n",
590
- "... ... ... \n",
591
- "586709 NaN 0.094017 \n",
592
- "586710 NaN 0.070175 \n",
593
- "586711 NaN 0.043478 \n",
594
- "586712 NaN 0.036697 \n",
595
- "586713 NaN 0.077670 \n",
596
  "\n",
597
- " Mean Listings Price Cut Amount Percent Listings Price Cut (Smoothed) \\\n",
598
- "0 13508.368375 NaN \n",
599
- "1 14114.788383 NaN \n",
600
- "2 14326.128956 NaN \n",
601
- "3 13998.585612 NaN \n",
602
- "4 14120.035549 0.047622 \n",
603
- "... ... ... \n",
604
- "586709 NaN 0.037378 \n",
605
- "586710 NaN 0.043203 \n",
606
- "586711 NaN 0.054073 \n",
607
- "586712 NaN 0.061092 \n",
608
- "586713 NaN 0.057005 \n",
609
  "\n",
610
  " Median Days on Pending (Smoothed) Median Days on Pending \n",
611
  "0 NaN NaN \n",
@@ -623,7 +438,7 @@
623
  "[586714 rows x 13 columns]"
624
  ]
625
  },
626
- "execution_count": 9,
627
  "metadata": {},
628
  "output_type": "execute_result"
629
  }
@@ -640,6 +455,8 @@
640
  " }\n",
641
  ")\n",
642
  "\n",
 
 
643
  "final_df"
644
  ]
645
  },
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 6,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
 
13
  " get_combined_df,\n",
14
  " save_final_df_as_jsonl,\n",
15
  " handle_slug_column_mappings,\n",
16
+ " set_home_type,\n",
17
  ")"
18
  ]
19
  },
20
  {
21
  "cell_type": "code",
22
+ "execution_count": 7,
23
  "metadata": {},
24
  "outputs": [],
25
  "source": [
 
32
  },
33
  {
34
  "cell_type": "code",
35
+ "execution_count": 8,
36
  "metadata": {},
37
  "outputs": [
38
  {
39
+ "name": "stdout",
40
+ "output_type": "stream",
41
+ "text": [
42
+ "processing Metro_med_listings_price_cut_amt_uc_sfr_month.csv\n",
43
+ "processing Metro_perc_listings_price_cut_uc_sfr_week.csv\n",
44
+ "processing Metro_med_listings_price_cut_amt_uc_sfrcondo_month.csv\n",
45
+ "processing Metro_med_listings_price_cut_amt_uc_sfr_week.csv\n",
46
+ "processing Metro_med_doz_pending_uc_sfrcondo_month.csv\n",
47
+ "processing Metro_mean_listings_price_cut_amt_uc_sfr_sm_month.csv\n",
48
+ "processing Metro_med_listings_price_cut_perc_uc_sfrcondo_sm_month.csv\n",
49
+ "processing Metro_mean_days_to_close_uc_sfrcondo_week.csv\n",
50
+ "processing Metro_mean_days_to_close_uc_sfrcondo_month.csv\n",
51
+ "processing Metro_mean_listings_price_cut_amt_uc_sfrcondo_sm_month.csv\n",
52
+ "processing Metro_med_listings_price_cut_perc_uc_sfr_week.csv\n",
53
+ "processing Metro_median_days_to_close_uc_sfrcondo_sm_week.csv\n",
54
+ "processing Metro_med_listings_price_cut_perc_uc_sfr_sm_week.csv\n",
55
+ "processing Metro_mean_listings_price_cut_perc_uc_sfrcondo_sm_week.csv\n",
56
+ "processing Metro_perc_listings_price_cut_uc_sfrcondo_week.csv\n",
57
+ "processing Metro_med_doz_pending_uc_sfrcondo_sm_month.csv\n",
58
+ "processing Metro_mean_days_to_close_uc_sfrcondo_sm_week.csv\n",
59
+ "processing Metro_med_listings_price_cut_perc_uc_sfrcondo_week.csv\n",
60
+ "processing Metro_mean_listings_price_cut_amt_uc_sfr_week.csv\n",
61
+ "processing Metro_med_listings_price_cut_perc_uc_sfrcondo_month.csv\n",
62
+ "processing Metro_mean_doz_pending_uc_sfrcondo_week.csv\n",
63
+ "processing Metro_mean_listings_price_cut_amt_uc_sfrcondo_week.csv\n",
64
+ "processing Metro_median_days_to_close_uc_sfrcondo_week.csv\n",
65
+ "processing Metro_med_listings_price_cut_amt_uc_sfr_sm_month.csv\n",
66
+ "processing Metro_mean_doz_pending_uc_sfrcondo_sm_month.csv\n",
67
+ "processing Metro_med_listings_price_cut_perc_uc_sfr_sm_month.csv\n",
68
+ "processing Metro_perc_listings_price_cut_uc_sfrcondo_sm_week.csv\n",
69
+ "processing Metro_median_days_to_close_uc_sfrcondo_sm_month.csv\n",
70
+ "processing Metro_med_listings_price_cut_perc_uc_sfr_month.csv\n",
71
+ "processing Metro_mean_listings_price_cut_perc_uc_sfrcondo_week.csv\n",
72
+ "processing Metro_med_listings_price_cut_amt_uc_sfrcondo_week.csv\n",
73
+ "processing Metro_med_listings_price_cut_amt_uc_sfrcondo_sm_week.csv\n",
74
+ "processing Metro_mean_days_to_close_uc_sfrcondo_sm_month.csv\n",
75
+ "processing Metro_med_listings_price_cut_amt_uc_sfr_sm_week.csv\n",
76
+ "processing Metro_mean_doz_pending_uc_sfrcondo_sm_week.csv\n",
77
+ "processing Metro_mean_listings_price_cut_amt_uc_sfrcondo_sm_week.csv\n",
78
+ "processing Metro_mean_listings_price_cut_amt_uc_sfr_sm_week.csv\n",
79
+ "processing Metro_perc_listings_price_cut_uc_sfrcondo_sm_month.csv\n",
80
+ "processing Metro_mean_listings_price_cut_amt_uc_sfrcondo_month.csv\n",
81
+ "processing Metro_med_listings_price_cut_amt_uc_sfrcondo_sm_month.csv\n",
82
+ "processing Metro_med_doz_pending_uc_sfrcondo_sm_week.csv\n",
83
+ "processing Metro_med_listings_price_cut_perc_uc_sfrcondo_sm_week.csv\n",
84
+ "processing Metro_perc_listings_price_cut_uc_sfr_month.csv\n",
85
+ "processing Metro_med_doz_pending_uc_sfrcondo_week.csv\n",
86
+ "processing Metro_mean_listings_price_cut_perc_uc_sfrcondo_sm_month.csv\n",
87
+ "processing Metro_perc_listings_price_cut_uc_sfr_sm_month.csv\n",
88
+ "processing Metro_median_days_to_close_uc_sfrcondo_month.csv\n",
89
+ "processing Metro_perc_listings_price_cut_uc_sfr_sm_week.csv\n",
90
+ "processing Metro_mean_listings_price_cut_perc_uc_sfrcondo_month.csv\n",
91
+ "processing Metro_mean_listings_price_cut_amt_uc_sfr_month.csv\n",
92
+ "processing Metro_mean_doz_pending_uc_sfrcondo_month.csv\n"
93
+ ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
94
  }
95
  ],
96
  "source": [
 
122
  "\n",
123
  " cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))\n",
124
  "\n",
125
+ " cur_df[\"RegionName\"] = cur_df[\"RegionName\"].astype(str)\n",
126
+ " cur_df = set_home_type(cur_df, filename)\n",
 
 
 
 
127
  "\n",
128
  " data_frames = handle_slug_column_mappings(\n",
129
  " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
 
148
  },
149
  {
150
  "cell_type": "code",
151
+ "execution_count": 4,
152
  "metadata": {},
153
  "outputs": [
154
  {
 
179
  " <th>State</th>\n",
180
  " <th>Home Type</th>\n",
181
  " <th>Date</th>\n",
 
182
  " <th>Percent Listings Price Cut</th>\n",
183
  " <th>Mean Listings Price Cut Amount</th>\n",
184
  " <th>Percent Listings Price Cut (Smoothed)</th>\n",
185
+ " <th>Mean Listings Price Cut Amount (Smoothed)</th>\n",
186
  " <th>Median Days on Pending (Smoothed)</th>\n",
187
  " <th>Median Days on Pending</th>\n",
188
  " </tr>\n",
 
198
  " <td>SFR</td>\n",
199
  " <td>2018-01-06</td>\n",
200
  " <td>NaN</td>\n",
 
201
  " <td>13508.368375</td>\n",
202
  " <td>NaN</td>\n",
203
  " <td>NaN</td>\n",
204
  " <td>NaN</td>\n",
205
+ " <td>NaN</td>\n",
206
  " </tr>\n",
207
  " <tr>\n",
208
  " <th>1</th>\n",
 
213
  " <td>NaN</td>\n",
214
  " <td>SFR</td>\n",
215
  " <td>2018-01-13</td>\n",
 
216
  " <td>0.049042</td>\n",
217
  " <td>14114.788383</td>\n",
218
  " <td>NaN</td>\n",
219
  " <td>NaN</td>\n",
220
  " <td>NaN</td>\n",
221
+ " <td>NaN</td>\n",
222
  " </tr>\n",
223
  " <tr>\n",
224
  " <th>2</th>\n",
 
229
  " <td>NaN</td>\n",
230
  " <td>SFR</td>\n",
231
  " <td>2018-01-20</td>\n",
 
232
  " <td>0.044740</td>\n",
233
  " <td>14326.128956</td>\n",
234
  " <td>NaN</td>\n",
235
  " <td>NaN</td>\n",
236
  " <td>NaN</td>\n",
237
+ " <td>NaN</td>\n",
238
  " </tr>\n",
239
  " <tr>\n",
240
  " <th>3</th>\n",
 
245
  " <td>NaN</td>\n",
246
  " <td>SFR</td>\n",
247
  " <td>2018-01-27</td>\n",
 
248
  " <td>0.047930</td>\n",
249
  " <td>13998.585612</td>\n",
250
  " <td>NaN</td>\n",
251
+ " <td>13998.585612</td>\n",
252
  " <td>NaN</td>\n",
253
  " <td>NaN</td>\n",
254
  " </tr>\n",
 
261
  " <td>NaN</td>\n",
262
  " <td>SFR</td>\n",
263
  " <td>2018-02-03</td>\n",
 
264
  " <td>0.047622</td>\n",
265
  " <td>14120.035549</td>\n",
266
  " <td>0.047622</td>\n",
267
+ " <td>14120.035549</td>\n",
268
  " <td>NaN</td>\n",
269
  " <td>NaN</td>\n",
270
  " </tr>\n",
 
291
  " <td>Winfield, KS</td>\n",
292
  " <td>msa</td>\n",
293
  " <td>KS</td>\n",
294
+ " <td>all homes</td>\n",
295
  " <td>2024-01-06</td>\n",
 
296
  " <td>0.094017</td>\n",
297
  " <td>NaN</td>\n",
298
  " <td>0.037378</td>\n",
299
  " <td>NaN</td>\n",
300
  " <td>NaN</td>\n",
301
+ " <td>NaN</td>\n",
302
  " </tr>\n",
303
  " <tr>\n",
304
  " <th>586710</th>\n",
 
307
  " <td>Winfield, KS</td>\n",
308
  " <td>msa</td>\n",
309
  " <td>KS</td>\n",
310
+ " <td>all homes</td>\n",
311
  " <td>2024-01-13</td>\n",
 
312
  " <td>0.070175</td>\n",
313
  " <td>NaN</td>\n",
314
  " <td>0.043203</td>\n",
315
  " <td>NaN</td>\n",
316
  " <td>NaN</td>\n",
317
+ " <td>NaN</td>\n",
318
  " </tr>\n",
319
  " <tr>\n",
320
  " <th>586711</th>\n",
 
323
  " <td>Winfield, KS</td>\n",
324
  " <td>msa</td>\n",
325
  " <td>KS</td>\n",
326
+ " <td>all homes</td>\n",
327
  " <td>2024-01-20</td>\n",
 
328
  " <td>0.043478</td>\n",
329
  " <td>NaN</td>\n",
330
  " <td>0.054073</td>\n",
331
  " <td>NaN</td>\n",
332
  " <td>NaN</td>\n",
333
+ " <td>NaN</td>\n",
334
  " </tr>\n",
335
  " <tr>\n",
336
  " <th>586712</th>\n",
 
339
  " <td>Winfield, KS</td>\n",
340
  " <td>msa</td>\n",
341
  " <td>KS</td>\n",
342
+ " <td>all homes</td>\n",
343
  " <td>2024-01-27</td>\n",
 
344
  " <td>0.036697</td>\n",
345
  " <td>NaN</td>\n",
346
  " <td>0.061092</td>\n",
347
  " <td>NaN</td>\n",
348
  " <td>NaN</td>\n",
349
+ " <td>NaN</td>\n",
350
  " </tr>\n",
351
  " <tr>\n",
352
  " <th>586713</th>\n",
 
355
  " <td>Winfield, KS</td>\n",
356
  " <td>msa</td>\n",
357
  " <td>KS</td>\n",
358
+ " <td>all homes</td>\n",
359
  " <td>2024-02-03</td>\n",
 
360
  " <td>0.077670</td>\n",
361
  " <td>NaN</td>\n",
362
  " <td>0.057005</td>\n",
363
  " <td>NaN</td>\n",
364
  " <td>NaN</td>\n",
365
+ " <td>NaN</td>\n",
366
  " </tr>\n",
367
  " </tbody>\n",
368
  "</table>\n",
 
370
  "</div>"
371
  ],
372
  "text/plain": [
373
+ " Region ID Size Rank Region Region Type State Home Type \\\n",
374
+ "0 102001 0 United States country NaN SFR \n",
375
+ "1 102001 0 United States country NaN SFR \n",
376
+ "2 102001 0 United States country NaN SFR \n",
377
+ "3 102001 0 United States country NaN SFR \n",
378
+ "4 102001 0 United States country NaN SFR \n",
379
+ "... ... ... ... ... ... ... \n",
380
+ "586709 845172 769 Winfield, KS msa KS all homes \n",
381
+ "586710 845172 769 Winfield, KS msa KS all homes \n",
382
+ "586711 845172 769 Winfield, KS msa KS all homes \n",
383
+ "586712 845172 769 Winfield, KS msa KS all homes \n",
384
+ "586713 845172 769 Winfield, KS msa KS all homes \n",
385
  "\n",
386
+ " Date Percent Listings Price Cut Mean Listings Price Cut Amount \\\n",
387
+ "0 2018-01-06 NaN 13508.368375 \n",
388
+ "1 2018-01-13 0.049042 14114.788383 \n",
389
+ "2 2018-01-20 0.044740 14326.128956 \n",
390
+ "3 2018-01-27 0.047930 13998.585612 \n",
391
+ "4 2018-02-03 0.047622 14120.035549 \n",
392
+ "... ... ... ... \n",
393
+ "586709 2024-01-06 0.094017 NaN \n",
394
+ "586710 2024-01-13 0.070175 NaN \n",
395
+ "586711 2024-01-20 0.043478 NaN \n",
396
+ "586712 2024-01-27 0.036697 NaN \n",
397
+ "586713 2024-02-03 0.077670 NaN \n",
398
  "\n",
399
+ " Percent Listings Price Cut (Smoothed) \\\n",
400
+ "0 NaN \n",
401
+ "1 NaN \n",
402
+ "2 NaN \n",
403
+ "3 NaN \n",
404
+ "4 0.047622 \n",
405
+ "... ... \n",
406
+ "586709 0.037378 \n",
407
+ "586710 0.043203 \n",
408
+ "586711 0.054073 \n",
409
+ "586712 0.061092 \n",
410
+ "586713 0.057005 \n",
411
  "\n",
412
+ " Mean Listings Price Cut Amount (Smoothed) \\\n",
413
+ "0 NaN \n",
414
+ "1 NaN \n",
415
+ "2 NaN \n",
416
+ "3 13998.585612 \n",
417
+ "4 14120.035549 \n",
418
+ "... ... \n",
419
+ "586709 NaN \n",
420
+ "586710 NaN \n",
421
+ "586711 NaN \n",
422
+ "586712 NaN \n",
423
+ "586713 NaN \n",
424
  "\n",
425
  " Median Days on Pending (Smoothed) Median Days on Pending \n",
426
  "0 NaN NaN \n",
 
438
  "[586714 rows x 13 columns]"
439
  ]
440
  },
441
+ "execution_count": 4,
442
  "metadata": {},
443
  "output_type": "execute_result"
444
  }
 
455
  " }\n",
456
  ")\n",
457
  "\n",
458
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
459
+ "\n",
460
  "final_df"
461
  ]
462
  },
processors/days_on_market.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[6]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import (
11
+ get_combined_df,
12
+ save_final_df_as_jsonl,
13
+ handle_slug_column_mappings,
14
+ set_home_type,
15
+ )
16
+
17
+
18
+ # In[7]:
19
+
20
+
21
+ DATA_DIR = "../data"
22
+ PROCESSED_DIR = "../processed/"
23
+ FACET_DIR = "days_on_market/"
24
+ FULL_DATA_DIR_PATH = os.path.join(DATA_DIR, FACET_DIR)
25
+ FULL_PROCESSED_DIR_PATH = os.path.join(PROCESSED_DIR, FACET_DIR)
26
+
27
+
28
+ # In[8]:
29
+
30
+
31
+ data_frames = []
32
+
33
+ exclude_columns = [
34
+ "RegionID",
35
+ "SizeRank",
36
+ "RegionName",
37
+ "RegionType",
38
+ "StateName",
39
+ "Home Type",
40
+ ]
41
+
42
+ slug_column_mappings = {
43
+ "_mean_listings_price_cut_amt_": "Mean Listings Price Cut Amount",
44
+ "_med_doz_pending_": "Median Days on Pending",
45
+ "_median_days_to_pending_": "Median Days to Close",
46
+ "_perc_listings_price_cut_": "Percent Listings Price Cut",
47
+ }
48
+
49
+
50
+ for filename in os.listdir(FULL_DATA_DIR_PATH):
51
+ if filename.endswith(".csv"):
52
+ print("processing " + filename)
53
+ # skip month files for now since they are redundant
54
+ if "month" in filename:
55
+ continue
56
+
57
+ cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))
58
+
59
+ cur_df["RegionName"] = cur_df["RegionName"].astype(str)
60
+ cur_df = set_home_type(cur_df, filename)
61
+
62
+ data_frames = handle_slug_column_mappings(
63
+ data_frames, slug_column_mappings, exclude_columns, filename, cur_df
64
+ )
65
+
66
+
67
+ combined_df = get_combined_df(
68
+ data_frames,
69
+ [
70
+ "RegionID",
71
+ "SizeRank",
72
+ "RegionName",
73
+ "RegionType",
74
+ "StateName",
75
+ "Home Type",
76
+ "Date",
77
+ ],
78
+ )
79
+
80
+ combined_df
81
+
82
+
83
+ # In[4]:
84
+
85
+
86
+ # Adjust column names
87
+ final_df = combined_df.rename(
88
+ columns={
89
+ "RegionID": "Region ID",
90
+ "SizeRank": "Size Rank",
91
+ "RegionName": "Region",
92
+ "RegionType": "Region Type",
93
+ "StateName": "State",
94
+ }
95
+ )
96
+
97
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
98
+
99
+ final_df
100
+
101
+
102
+ # In[5]:
103
+
104
+
105
+ save_final_df_as_jsonl(FULL_PROCESSED_DIR_PATH, final_df)
106
+
processors/for_sale_listings.ipynb CHANGED
@@ -13,6 +13,7 @@
13
  " get_combined_df,\n",
14
  " save_final_df_as_jsonl,\n",
15
  " handle_slug_column_mappings,\n",
 
16
  ")"
17
  ]
18
  },
@@ -371,12 +372,7 @@
371
  " if \"month\" in filename:\n",
372
  " continue\n",
373
  "\n",
374
- " if \"sfrcondo\" in filename:\n",
375
- " cur_df[\"Home Type\"] = \"all homes\"\n",
376
- " elif \"sfr\" in filename:\n",
377
- " cur_df[\"Home Type\"] = \"SFR\"\n",
378
- " elif \"condo\" in filename:\n",
379
- " cur_df[\"Home Type\"] = \"condo/co-op only\"\n",
380
  "\n",
381
  " data_frames = handle_slug_column_mappings(\n",
382
  " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
@@ -695,6 +691,8 @@
695
  " }\n",
696
  ")\n",
697
  "\n",
 
 
698
  "final_df"
699
  ]
700
  },
 
13
  " get_combined_df,\n",
14
  " save_final_df_as_jsonl,\n",
15
  " handle_slug_column_mappings,\n",
16
+ " set_home_type,\n",
17
  ")"
18
  ]
19
  },
 
372
  " if \"month\" in filename:\n",
373
  " continue\n",
374
  "\n",
375
+ " cur_df = set_home_type(cur_df, filename)\n",
 
 
 
 
 
376
  "\n",
377
  " data_frames = handle_slug_column_mappings(\n",
378
  " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
 
691
  " }\n",
692
  ")\n",
693
  "\n",
694
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
695
+ "\n",
696
  "final_df"
697
  ]
698
  },
processors/for_sale_listings.py ADDED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[1]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import (
11
+ get_combined_df,
12
+ save_final_df_as_jsonl,
13
+ handle_slug_column_mappings,
14
+ set_home_type,
15
+ )
16
+
17
+
18
+ # In[2]:
19
+
20
+
21
+ DATA_DIR = "../data"
22
+ PROCESSED_DIR = "../processed/"
23
+ FACET_DIR = "for_sale_listings/"
24
+ FULL_DATA_DIR_PATH = os.path.join(DATA_DIR, FACET_DIR)
25
+ FULL_PROCESSED_DIR_PATH = os.path.join(PROCESSED_DIR, FACET_DIR)
26
+
27
+
28
+ # In[3]:
29
+
30
+
31
+ exclude_columns = [
32
+ "RegionID",
33
+ "SizeRank",
34
+ "RegionName",
35
+ "RegionType",
36
+ "StateName",
37
+ "Home Type",
38
+ ]
39
+
40
+ slug_column_mappings = {
41
+ "_mlp_": "Median Listing Price",
42
+ "_new_listings_": "New Listings",
43
+ "new_pending": "New Pending",
44
+ }
45
+
46
+
47
+ data_frames = []
48
+
49
+ for filename in os.listdir(FULL_DATA_DIR_PATH):
50
+ if filename.endswith(".csv"):
51
+ print("processing " + filename)
52
+ cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))
53
+
54
+ # ignore monthly data for now since it is redundant
55
+ if "month" in filename:
56
+ continue
57
+
58
+ cur_df = set_home_type(cur_df, filename)
59
+
60
+ data_frames = handle_slug_column_mappings(
61
+ data_frames, slug_column_mappings, exclude_columns, filename, cur_df
62
+ )
63
+
64
+
65
+ combined_df = get_combined_df(
66
+ data_frames,
67
+ [
68
+ "RegionID",
69
+ "SizeRank",
70
+ "RegionName",
71
+ "RegionType",
72
+ "StateName",
73
+ "Home Type",
74
+ "Date",
75
+ ],
76
+ )
77
+
78
+ combined_df
79
+
80
+
81
+ # In[4]:
82
+
83
+
84
+ # Adjust column names
85
+ final_df = combined_df.rename(
86
+ columns={
87
+ "RegionID": "Region ID",
88
+ "SizeRank": "Size Rank",
89
+ "RegionName": "Region",
90
+ "RegionType": "Region Type",
91
+ "StateName": "State",
92
+ }
93
+ )
94
+
95
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
96
+
97
+ final_df
98
+
99
+
100
+ # In[5]:
101
+
102
+
103
+ save_final_df_as_jsonl(FULL_PROCESSED_DIR_PATH, final_df)
104
+
processors/helpers.py CHANGED
@@ -19,6 +19,21 @@ def coalesce_columns(
19
  return combined_df
20
 
21
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
  def get_combined_df(data_frames, on):
23
  combined_df = None
24
  if len(data_frames) > 1:
@@ -72,7 +87,7 @@ def save_final_df_as_jsonl(FULL_PROCESSED_DIR_PATH, final_df):
72
  os.makedirs(FULL_PROCESSED_DIR_PATH)
73
 
74
  final_df.to_json(
75
- FULL_PROCESSED_DIR_PATH + "final5.jsonl", orient="records", lines=True
76
  )
77
 
78
 
 
19
  return combined_df
20
 
21
 
22
+ def set_home_type(cur_df, filename):
23
+ if "_sfrcondo_" in filename:
24
+ cur_df["Home Type"] = "all homes"
25
+ if "_sfrcondomfr_" in filename:
26
+ cur_df["Home Type"] = "all homes plus multifamily"
27
+ elif "_sfr_" in filename:
28
+ cur_df["Home Type"] = "SFR"
29
+ elif "_condo_" in filename:
30
+ cur_df["Home Type"] = "condo/co-op"
31
+ elif "_mfr_" in filename:
32
+ cur_df["Home Type"] = "multifamily"
33
+
34
+ return cur_df
35
+
36
+
37
  def get_combined_df(data_frames, on):
38
  combined_df = None
39
  if len(data_frames) > 1:
 
87
  os.makedirs(FULL_PROCESSED_DIR_PATH)
88
 
89
  final_df.to_json(
90
+ FULL_PROCESSED_DIR_PATH + "final.jsonl", orient="records", lines=True
91
  )
92
 
93
 
processors/home_values.ipynb CHANGED
@@ -13,6 +13,7 @@
13
  " get_combined_df,\n",
14
  " save_final_df_as_jsonl,\n",
15
  " handle_slug_column_mappings,\n",
 
16
  ")"
17
  ]
18
  },
@@ -436,12 +437,7 @@
436
  " else:\n",
437
  " cur_df[\"Bedroom Count\"] = \"All Bedrooms\"\n",
438
  "\n",
439
- " if \"_uc_sfr_\" in filename:\n",
440
- " cur_df[\"Home Type\"] = \"SFR\"\n",
441
- " elif \"_uc_sfrcondo_\" in filename:\n",
442
- " cur_df[\"Home Type\"] = \"all homes (SFR/condo)\"\n",
443
- " elif \"_uc_condo_\" in filename:\n",
444
- " cur_df[\"Home Type\"] = \"condo\"\n",
445
  "\n",
446
  " cur_df[\"StateName\"] = cur_df[\"StateName\"].astype(str)\n",
447
  " cur_df[\"RegionName\"] = cur_df[\"RegionName\"].astype(str)\n",
@@ -1401,6 +1397,8 @@
1401
  " }\n",
1402
  ")\n",
1403
  "\n",
 
 
1404
  "final_df"
1405
  ]
1406
  },
 
13
  " get_combined_df,\n",
14
  " save_final_df_as_jsonl,\n",
15
  " handle_slug_column_mappings,\n",
16
+ " set_home_type,\n",
17
  ")"
18
  ]
19
  },
 
437
  " else:\n",
438
  " cur_df[\"Bedroom Count\"] = \"All Bedrooms\"\n",
439
  "\n",
440
+ " cur_df = set_home_type(cur_df, filename)\n",
 
 
 
 
 
441
  "\n",
442
  " cur_df[\"StateName\"] = cur_df[\"StateName\"].astype(str)\n",
443
  " cur_df[\"RegionName\"] = cur_df[\"RegionName\"].astype(str)\n",
 
1397
  " }\n",
1398
  ")\n",
1399
  "\n",
1400
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
1401
+ "\n",
1402
  "final_df"
1403
  ]
1404
  },
processors/home_values.py ADDED
@@ -0,0 +1,179 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[1]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import (
11
+ get_combined_df,
12
+ save_final_df_as_jsonl,
13
+ handle_slug_column_mappings,
14
+ set_home_type,
15
+ )
16
+
17
+
18
+ # In[2]:
19
+
20
+
21
+ DATA_DIR = "../data"
22
+ PROCESSED_DIR = "../processed/"
23
+ FACET_DIR = "home_values/"
24
+ FULL_DATA_DIR_PATH = os.path.join(DATA_DIR, FACET_DIR)
25
+ FULL_PROCESSED_DIR_PATH = os.path.join(PROCESSED_DIR, FACET_DIR)
26
+
27
+
28
+ # In[5]:
29
+
30
+
31
+ data_frames = []
32
+
33
+ slug_column_mappings = {
34
+ "_tier_0.0_0.33_": "Bottom Tier ZHVI",
35
+ "_tier_0.33_0.67_": "Mid Tier ZHVI",
36
+ "_tier_0.67_1.0_": "Top Tier ZHVI",
37
+ "": "ZHVI",
38
+ }
39
+
40
+ for filename in os.listdir(FULL_DATA_DIR_PATH):
41
+ if filename.endswith(".csv"):
42
+ print("processing " + filename)
43
+ cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))
44
+ exclude_columns = [
45
+ "RegionID",
46
+ "SizeRank",
47
+ "RegionName",
48
+ "RegionType",
49
+ "StateName",
50
+ "Bedroom Count",
51
+ "Home Type",
52
+ ]
53
+
54
+ if "Zip" in filename:
55
+ continue
56
+ if "Neighborhood" in filename:
57
+ continue
58
+ if "City" in filename:
59
+ continue
60
+ if "Metro" in filename:
61
+ continue
62
+ if "County" in filename:
63
+ continue
64
+
65
+ if "City" in filename:
66
+ exclude_columns = exclude_columns + ["State", "Metro", "CountyName"]
67
+ elif "Zip" in filename:
68
+ exclude_columns = exclude_columns + [
69
+ "State",
70
+ "City",
71
+ "Metro",
72
+ "CountyName",
73
+ ]
74
+ elif "County" in filename:
75
+ exclude_columns = exclude_columns + [
76
+ "State",
77
+ "Metro",
78
+ "StateCodeFIPS",
79
+ "MunicipalCodeFIPS",
80
+ ]
81
+ elif "Neighborhood" in filename:
82
+ exclude_columns = exclude_columns + [
83
+ "State",
84
+ "City",
85
+ "Metro",
86
+ "CountyName",
87
+ ]
88
+
89
+ if "_bdrmcnt_1_" in filename:
90
+ cur_df["Bedroom Count"] = "1-Bedroom"
91
+ elif "_bdrmcnt_2_" in filename:
92
+ cur_df["Bedroom Count"] = "2-Bedrooms"
93
+ elif "_bdrmcnt_3_" in filename:
94
+ cur_df["Bedroom Count"] = "3-Bedrooms"
95
+ elif "_bdrmcnt_4_" in filename:
96
+ cur_df["Bedroom Count"] = "4-Bedrooms"
97
+ elif "_bdrmcnt_5_" in filename:
98
+ cur_df["Bedroom Count"] = "5+-Bedrooms"
99
+ else:
100
+ cur_df["Bedroom Count"] = "All Bedrooms"
101
+
102
+ cur_df = set_home_type(cur_df, filename)
103
+
104
+ cur_df["StateName"] = cur_df["StateName"].astype(str)
105
+ cur_df["RegionName"] = cur_df["RegionName"].astype(str)
106
+
107
+ data_frames = handle_slug_column_mappings(
108
+ data_frames, slug_column_mappings, exclude_columns, filename, cur_df
109
+ )
110
+
111
+
112
+ combined_df = get_combined_df(
113
+ data_frames,
114
+ [
115
+ "RegionID",
116
+ "SizeRank",
117
+ "RegionName",
118
+ "RegionType",
119
+ "StateName",
120
+ "Bedroom Count",
121
+ "Home Type",
122
+ "Date",
123
+ ],
124
+ )
125
+
126
+ combined_df
127
+
128
+
129
+ # In[11]:
130
+
131
+
132
+ final_df = combined_df
133
+
134
+ for index, row in final_df.iterrows():
135
+ if row["RegionType"] == "city":
136
+ final_df.at[index, "City"] = row["RegionName"]
137
+ elif row["RegionType"] == "county":
138
+ final_df.at[index, "County"] = row["RegionName"]
139
+ if row["RegionType"] == "state":
140
+ final_df.at[index, "StateName"] = row["RegionName"]
141
+
142
+ # coalesce State and StateName columns
143
+ # final_df["State"] = final_df["State"].combine_first(final_df["StateName"])
144
+ # final_df["County"] = final_df["County"].combine_first(final_df["CountyName"])
145
+
146
+ # final_df = final_df.drop(
147
+ # columns=[
148
+ # "StateName",
149
+ # # "CountyName"
150
+ # ]
151
+ # )
152
+ final_df
153
+
154
+
155
+ # In[12]:
156
+
157
+
158
+ final_df = final_df.rename(
159
+ columns={
160
+ "RegionID": "Region ID",
161
+ "SizeRank": "Size Rank",
162
+ "RegionName": "Region",
163
+ "RegionType": "Region Type",
164
+ "StateCodeFIPS": "State Code FIPS",
165
+ "StateName": "State",
166
+ "MunicipalCodeFIPS": "Municipal Code FIPS",
167
+ }
168
+ )
169
+
170
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
171
+
172
+ final_df
173
+
174
+
175
+ # In[13]:
176
+
177
+
178
+ save_final_df_as_jsonl(FULL_PROCESSED_DIR_PATH, final_df)
179
+
processors/home_values_forecasts.ipynb CHANGED
@@ -398,6 +398,7 @@
398
  " else:\n",
399
  " # print('Raw')\n",
400
  " cur_df.columns = list(cur_df.columns[:-3]) + cols\n",
 
401
  " cur_df[\"RegionName\"] = cur_df[\"RegionName\"].astype(str)\n",
402
  "\n",
403
  " data_frames.append(cur_df)\n",
@@ -461,6 +462,8 @@
461
  " state = regionName.split(\", \")[1]\n",
462
  " final_df.at[index, \"State\"] = state\n",
463
  "\n",
 
 
464
  "final_df"
465
  ]
466
  },
 
398
  " else:\n",
399
  " # print('Raw')\n",
400
  " cur_df.columns = list(cur_df.columns[:-3]) + cols\n",
401
+ "\n",
402
  " cur_df[\"RegionName\"] = cur_df[\"RegionName\"].astype(str)\n",
403
  "\n",
404
  " data_frames.append(cur_df)\n",
 
462
  " state = regionName.split(\", \")[1]\n",
463
  " final_df.at[index, \"State\"] = state\n",
464
  "\n",
465
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
466
+ "\n",
467
  "final_df"
468
  ]
469
  },
processors/home_values_forecasts.py ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[1]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import get_combined_df, save_final_df_as_jsonl
11
+
12
+
13
+ # In[2]:
14
+
15
+
16
+ DATA_DIR = "../data/"
17
+ PROCESSED_DIR = "../processed/"
18
+ FACET_DIR = "home_values_forecasts/"
19
+ FULL_DATA_DIR_PATH = os.path.join(DATA_DIR, FACET_DIR)
20
+ FULL_PROCESSED_DIR_PATH = os.path.join(PROCESSED_DIR, FACET_DIR)
21
+
22
+
23
+ # In[3]:
24
+
25
+
26
+ data_frames = []
27
+
28
+ for filename in os.listdir(FULL_DATA_DIR_PATH):
29
+ if filename.endswith(".csv"):
30
+ print("processing " + filename)
31
+ cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))
32
+
33
+ cols = ["Month Over Month %", "Quarter Over Quarter %", "Year Over Year %"]
34
+ if filename.endswith("sm_sa_month.csv"):
35
+ # print('Smoothed')
36
+ cur_df.columns = list(cur_df.columns[:-3]) + [
37
+ x + " (Smoothed) (Seasonally Adjusted)" for x in cols
38
+ ]
39
+ else:
40
+ # print('Raw')
41
+ cur_df.columns = list(cur_df.columns[:-3]) + cols
42
+
43
+ cur_df["RegionName"] = cur_df["RegionName"].astype(str)
44
+
45
+ data_frames.append(cur_df)
46
+
47
+
48
+ combined_df = get_combined_df(
49
+ data_frames,
50
+ [
51
+ "RegionID",
52
+ "RegionType",
53
+ "SizeRank",
54
+ "StateName",
55
+ "BaseDate",
56
+ ],
57
+ )
58
+
59
+ combined_df
60
+
61
+
62
+ # In[1]:
63
+
64
+
65
+ # Adjust columns
66
+ final_df = combined_df
67
+ final_df = combined_df.drop("StateName", axis=1)
68
+ final_df = final_df.rename(
69
+ columns={
70
+ "CountyName": "County",
71
+ "BaseDate": "Date",
72
+ "RegionName": "Region",
73
+ "RegionType": "Region Type",
74
+ "RegionID": "Region ID",
75
+ "SizeRank": "Size Rank",
76
+ }
77
+ )
78
+
79
+ # iterate over rows of final_df and populate State and City columns if the regionType is msa
80
+ for index, row in final_df.iterrows():
81
+ if row["Region Type"] == "msa":
82
+ regionName = row["Region"]
83
+ # final_df.at[index, 'Metro'] = regionName
84
+
85
+ city = regionName.split(", ")[0]
86
+ final_df.at[index, "City"] = city
87
+
88
+ state = regionName.split(", ")[1]
89
+ final_df.at[index, "State"] = state
90
+
91
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
92
+
93
+ final_df
94
+
95
+
96
+ # In[9]:
97
+
98
+
99
+ save_final_df_as_jsonl(FULL_PROCESSED_DIR_PATH, final_df)
100
+
processors/new_construction.ipynb CHANGED
@@ -13,6 +13,7 @@
13
  " get_combined_df,\n",
14
  " save_final_df_as_jsonl,\n",
15
  " handle_slug_column_mappings,\n",
 
16
  ")"
17
  ]
18
  },
@@ -289,12 +290,7 @@
289
  " print(\"processing \" + filename)\n",
290
  " cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))\n",
291
  "\n",
292
- " if \"sfrcondo\" in filename:\n",
293
- " cur_df[\"Home Type\"] = \"all homes\"\n",
294
- " elif \"sfr\" in filename:\n",
295
- " cur_df[\"Home Type\"] = \"SFR\"\n",
296
- " elif \"condo\" in filename:\n",
297
- " cur_df[\"Home Type\"] = \"condo/co-op only\"\n",
298
  "\n",
299
  " data_frames = handle_slug_column_mappings(\n",
300
  " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
@@ -551,6 +547,8 @@
551
  " }\n",
552
  ")\n",
553
  "\n",
 
 
554
  "final_df.sort_values(by=[\"Region ID\", \"Home Type\", \"Date\"])"
555
  ]
556
  },
 
13
  " get_combined_df,\n",
14
  " save_final_df_as_jsonl,\n",
15
  " handle_slug_column_mappings,\n",
16
+ " set_home_type,\n",
17
  ")"
18
  ]
19
  },
 
290
  " print(\"processing \" + filename)\n",
291
  " cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))\n",
292
  "\n",
293
+ " cur_df = set_home_type(cur_df, filename)\n",
 
 
 
 
 
294
  "\n",
295
  " data_frames = handle_slug_column_mappings(\n",
296
  " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
 
547
  " }\n",
548
  ")\n",
549
  "\n",
550
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
551
+ "\n",
552
  "final_df.sort_values(by=[\"Region ID\", \"Home Type\", \"Date\"])"
553
  ]
554
  },
processors/new_construction.py ADDED
@@ -0,0 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[1]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import (
11
+ get_combined_df,
12
+ save_final_df_as_jsonl,
13
+ handle_slug_column_mappings,
14
+ set_home_type,
15
+ )
16
+
17
+
18
+ # In[2]:
19
+
20
+
21
+ DATA_DIR = "../data"
22
+ PROCESSED_DIR = "../processed/"
23
+ FACET_DIR = "new_construction/"
24
+ FULL_DATA_DIR_PATH = os.path.join(DATA_DIR, FACET_DIR)
25
+ FULL_PROCESSED_DIR_PATH = os.path.join(PROCESSED_DIR, FACET_DIR)
26
+
27
+
28
+ # In[3]:
29
+
30
+
31
+ exclude_columns = [
32
+ "RegionID",
33
+ "SizeRank",
34
+ "RegionName",
35
+ "RegionType",
36
+ "StateName",
37
+ "Home Type",
38
+ ]
39
+
40
+ slug_column_mappings = {
41
+ "_median_sale_price_per_sqft": "Median Sale Price per Sqft",
42
+ "_median_sale_price": "Median Sale Price",
43
+ "sales_count": "Sales Count",
44
+ }
45
+
46
+ data_frames = []
47
+
48
+ for filename in os.listdir(FULL_DATA_DIR_PATH):
49
+ if filename.endswith(".csv"):
50
+ print("processing " + filename)
51
+ cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))
52
+
53
+ cur_df = set_home_type(cur_df, filename)
54
+
55
+ data_frames = handle_slug_column_mappings(
56
+ data_frames, slug_column_mappings, exclude_columns, filename, cur_df
57
+ )
58
+
59
+
60
+ combined_df = get_combined_df(
61
+ data_frames,
62
+ [
63
+ "RegionID",
64
+ "SizeRank",
65
+ "RegionName",
66
+ "RegionType",
67
+ "StateName",
68
+ "Home Type",
69
+ "Date",
70
+ ],
71
+ )
72
+
73
+ combined_df
74
+
75
+
76
+ # In[4]:
77
+
78
+
79
+ final_df = combined_df
80
+ final_df = final_df.rename(
81
+ columns={
82
+ "RegionID": "Region ID",
83
+ "SizeRank": "Size Rank",
84
+ "RegionName": "Region",
85
+ "RegionType": "Region Type",
86
+ "StateName": "State",
87
+ }
88
+ )
89
+
90
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
91
+
92
+ final_df.sort_values(by=["Region ID", "Home Type", "Date"])
93
+
94
+
95
+ # In[5]:
96
+
97
+
98
+ save_final_df_as_jsonl(FULL_PROCESSED_DIR_PATH, final_df)
99
+
processors/rentals.ipynb CHANGED
@@ -13,6 +13,7 @@
13
  " get_combined_df,\n",
14
  " save_final_df_as_jsonl,\n",
15
  " handle_slug_column_mappings,\n",
 
16
  ")"
17
  ]
18
  },
@@ -368,56 +369,51 @@
368
  " \"Home Type\",\n",
369
  " ]\n",
370
  "\n",
371
- " if \"_sfrcondomfr_\" in filename:\n",
372
- " cur_df[\"Home Type\"] = \"all homes plus multifamily\"\n",
373
- " # change column type to string\n",
374
- " cur_df[\"RegionName\"] = cur_df[\"RegionName\"].astype(str)\n",
375
- " if \"City\" in filename:\n",
376
- " exclude_columns = [\n",
377
- " \"RegionID\",\n",
378
- " \"SizeRank\",\n",
379
- " \"RegionName\",\n",
380
- " \"RegionType\",\n",
381
- " \"StateName\",\n",
382
- " \"Home Type\",\n",
383
- " # City Specific\n",
384
- " \"State\",\n",
385
- " \"Metro\",\n",
386
- " \"CountyName\",\n",
387
- " ]\n",
388
- " elif \"Zip\" in filename:\n",
389
- " exclude_columns = [\n",
390
- " \"RegionID\",\n",
391
- " \"SizeRank\",\n",
392
- " \"RegionName\",\n",
393
- " \"RegionType\",\n",
394
- " \"StateName\",\n",
395
- " \"Home Type\",\n",
396
- " # Zip Specific\n",
397
- " \"State\",\n",
398
- " \"City\",\n",
399
- " \"Metro\",\n",
400
- " \"CountyName\",\n",
401
- " ]\n",
402
- " elif \"County\" in filename:\n",
403
- " exclude_columns = [\n",
404
- " \"RegionID\",\n",
405
- " \"SizeRank\",\n",
406
- " \"RegionName\",\n",
407
- " \"RegionType\",\n",
408
- " \"StateName\",\n",
409
- " \"Home Type\",\n",
410
- " # County Specific\n",
411
- " \"State\",\n",
412
- " \"Metro\",\n",
413
- " \"StateCodeFIPS\",\n",
414
- " \"MunicipalCodeFIPS\",\n",
415
- " ]\n",
416
  "\n",
417
- " elif \"_sfr_\" in filename:\n",
418
- " cur_df[\"Home Type\"] = \"SFR\"\n",
419
- " elif \"_mfr_\" in filename:\n",
420
- " cur_df[\"Home Type\"] = \"multifamily\"\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
421
  "\n",
422
  " data_frames = handle_slug_column_mappings(\n",
423
  " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
@@ -1075,6 +1071,8 @@
1075
  " }\n",
1076
  ")\n",
1077
  "\n",
 
 
1078
  "final_df"
1079
  ]
1080
  },
 
13
  " get_combined_df,\n",
14
  " save_final_df_as_jsonl,\n",
15
  " handle_slug_column_mappings,\n",
16
+ " set_home_type,\n",
17
  ")"
18
  ]
19
  },
 
369
  " \"Home Type\",\n",
370
  " ]\n",
371
  "\n",
372
+ " cur_df[\"RegionName\"] = cur_df[\"RegionName\"].astype(str)\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
373
  "\n",
374
+ " cur_df = set_home_type(cur_df, filename)\n",
375
+ "\n",
376
+ " if \"City\" in filename:\n",
377
+ " exclude_columns = [\n",
378
+ " \"RegionID\",\n",
379
+ " \"SizeRank\",\n",
380
+ " \"RegionName\",\n",
381
+ " \"RegionType\",\n",
382
+ " \"StateName\",\n",
383
+ " \"Home Type\",\n",
384
+ " # City Specific\n",
385
+ " \"State\",\n",
386
+ " \"Metro\",\n",
387
+ " \"CountyName\",\n",
388
+ " ]\n",
389
+ " elif \"Zip\" in filename:\n",
390
+ " exclude_columns = [\n",
391
+ " \"RegionID\",\n",
392
+ " \"SizeRank\",\n",
393
+ " \"RegionName\",\n",
394
+ " \"RegionType\",\n",
395
+ " \"StateName\",\n",
396
+ " \"Home Type\",\n",
397
+ " # Zip Specific\n",
398
+ " \"State\",\n",
399
+ " \"City\",\n",
400
+ " \"Metro\",\n",
401
+ " \"CountyName\",\n",
402
+ " ]\n",
403
+ " elif \"County\" in filename:\n",
404
+ " exclude_columns = [\n",
405
+ " \"RegionID\",\n",
406
+ " \"SizeRank\",\n",
407
+ " \"RegionName\",\n",
408
+ " \"RegionType\",\n",
409
+ " \"StateName\",\n",
410
+ " \"Home Type\",\n",
411
+ " # County Specific\n",
412
+ " \"State\",\n",
413
+ " \"Metro\",\n",
414
+ " \"StateCodeFIPS\",\n",
415
+ " \"MunicipalCodeFIPS\",\n",
416
+ " ]\n",
417
  "\n",
418
  " data_frames = handle_slug_column_mappings(\n",
419
  " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
 
1071
  " }\n",
1072
  ")\n",
1073
  "\n",
1074
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
1075
+ "\n",
1076
  "final_df"
1077
  ]
1078
  },
processors/rentals.py ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[2]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import (
11
+ get_combined_df,
12
+ save_final_df_as_jsonl,
13
+ handle_slug_column_mappings,
14
+ set_home_type,
15
+ )
16
+
17
+
18
+ # In[3]:
19
+
20
+
21
+ DATA_DIR = "../data"
22
+ PROCESSED_DIR = "../processed/"
23
+ FACET_DIR = "rentals/"
24
+ FULL_DATA_DIR_PATH = os.path.join(DATA_DIR, FACET_DIR)
25
+ FULL_PROCESSED_DIR_PATH = os.path.join(PROCESSED_DIR, FACET_DIR)
26
+
27
+
28
+ # In[7]:
29
+
30
+
31
+ data_frames = []
32
+
33
+ slug_column_mappings = {"": "Rent"}
34
+
35
+ for filename in os.listdir(FULL_DATA_DIR_PATH):
36
+ if filename.endswith(".csv"):
37
+ # print("processing " + filename)
38
+ cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))
39
+ exclude_columns = [
40
+ "RegionID",
41
+ "SizeRank",
42
+ "RegionName",
43
+ "RegionType",
44
+ "StateName",
45
+ "Home Type",
46
+ ]
47
+
48
+ cur_df["RegionName"] = cur_df["RegionName"].astype(str)
49
+
50
+ cur_df = set_home_type(cur_df, filename)
51
+
52
+ if "City" in filename:
53
+ exclude_columns = [
54
+ "RegionID",
55
+ "SizeRank",
56
+ "RegionName",
57
+ "RegionType",
58
+ "StateName",
59
+ "Home Type",
60
+ # City Specific
61
+ "State",
62
+ "Metro",
63
+ "CountyName",
64
+ ]
65
+ elif "Zip" in filename:
66
+ exclude_columns = [
67
+ "RegionID",
68
+ "SizeRank",
69
+ "RegionName",
70
+ "RegionType",
71
+ "StateName",
72
+ "Home Type",
73
+ # Zip Specific
74
+ "State",
75
+ "City",
76
+ "Metro",
77
+ "CountyName",
78
+ ]
79
+ elif "County" in filename:
80
+ exclude_columns = [
81
+ "RegionID",
82
+ "SizeRank",
83
+ "RegionName",
84
+ "RegionType",
85
+ "StateName",
86
+ "Home Type",
87
+ # County Specific
88
+ "State",
89
+ "Metro",
90
+ "StateCodeFIPS",
91
+ "MunicipalCodeFIPS",
92
+ ]
93
+
94
+ data_frames = handle_slug_column_mappings(
95
+ data_frames, slug_column_mappings, exclude_columns, filename, cur_df
96
+ )
97
+
98
+
99
+ combined_df = get_combined_df(
100
+ data_frames,
101
+ [
102
+ "RegionID",
103
+ "SizeRank",
104
+ "RegionName",
105
+ "RegionType",
106
+ "StateName",
107
+ "Home Type",
108
+ "Date",
109
+ ],
110
+ )
111
+
112
+ combined_df
113
+
114
+
115
+ # In[8]:
116
+
117
+
118
+ final_df = combined_df
119
+
120
+ for index, row in final_df.iterrows():
121
+ if row["RegionType"] == "city":
122
+ final_df.at[index, "City"] = row["RegionName"]
123
+ elif row["RegionType"] == "county":
124
+ final_df.at[index, "County"] = row["RegionName"]
125
+
126
+ # coalesce State and StateName columns
127
+ final_df["State"] = final_df["State"].combine_first(final_df["StateName"])
128
+ final_df["State"] = final_df["County"].combine_first(final_df["CountyName"])
129
+
130
+ final_df = final_df.drop(columns=["StateName", "CountyName"])
131
+ final_df
132
+
133
+
134
+ # In[6]:
135
+
136
+
137
+ # Adjust column names
138
+ final_df = final_df.rename(
139
+ columns={
140
+ "RegionID": "Region ID",
141
+ "SizeRank": "Size Rank",
142
+ "RegionName": "Region",
143
+ "RegionType": "Region Type",
144
+ "StateCodeFIPS": "State Code FIPS",
145
+ "MunicipalCodeFIPS": "Municipal Code FIPS",
146
+ }
147
+ )
148
+
149
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
150
+
151
+ final_df
152
+
153
+
154
+ # In[7]:
155
+
156
+
157
+ save_final_df_as_jsonl(FULL_PROCESSED_DIR_PATH, final_df)
158
+
processors/sales.ipynb CHANGED
@@ -2,7 +2,7 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 1,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
@@ -13,12 +13,13 @@
13
  " get_combined_df,\n",
14
  " save_final_df_as_jsonl,\n",
15
  " handle_slug_column_mappings,\n",
 
16
  ")"
17
  ]
18
  },
19
  {
20
  "cell_type": "code",
21
- "execution_count": 2,
22
  "metadata": {},
23
  "outputs": [],
24
  "source": [
@@ -31,7 +32,7 @@
31
  },
32
  {
33
  "cell_type": "code",
34
- "execution_count": 3,
35
  "metadata": {},
36
  "outputs": [
37
  {
@@ -441,7 +442,7 @@
441
  "[255024 rows x 18 columns]"
442
  ]
443
  },
444
- "execution_count": 3,
445
  "metadata": {},
446
  "output_type": "execute_result"
447
  }
@@ -476,10 +477,7 @@
476
  "\n",
477
  " cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))\n",
478
  "\n",
479
- " if \"_sfrcondo_\" in filename:\n",
480
- " cur_df[\"Home Type\"] = \"all homes\"\n",
481
- " elif \"_sfr_\" in filename:\n",
482
- " cur_df[\"Home Type\"] = \"SFR\"\n",
483
  "\n",
484
  " data_frames = handle_slug_column_mappings(\n",
485
  " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
@@ -504,7 +502,7 @@
504
  },
505
  {
506
  "cell_type": "code",
507
- "execution_count": 6,
508
  "metadata": {},
509
  "outputs": [
510
  {
@@ -880,7 +878,7 @@
880
  "[255024 rows x 18 columns]"
881
  ]
882
  },
883
- "execution_count": 6,
884
  "metadata": {},
885
  "output_type": "execute_result"
886
  }
@@ -903,7 +901,396 @@
903
  },
904
  {
905
  "cell_type": "code",
906
- "execution_count": 7,
907
  "metadata": {},
908
  "outputs": [],
909
  "source": [
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 2,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
 
13
  " get_combined_df,\n",
14
  " save_final_df_as_jsonl,\n",
15
  " handle_slug_column_mappings,\n",
16
+ " set_home_type,\n",
17
  ")"
18
  ]
19
  },
20
  {
21
  "cell_type": "code",
22
+ "execution_count": 3,
23
  "metadata": {},
24
  "outputs": [],
25
  "source": [
 
32
  },
33
  {
34
  "cell_type": "code",
35
+ "execution_count": 4,
36
  "metadata": {},
37
  "outputs": [
38
  {
 
442
  "[255024 rows x 18 columns]"
443
  ]
444
  },
445
+ "execution_count": 4,
446
  "metadata": {},
447
  "output_type": "execute_result"
448
  }
 
477
  "\n",
478
  " cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))\n",
479
  "\n",
480
+ " cur_df = set_home_type(cur_df, filename)\n",
481
  "\n",
482
  " data_frames = handle_slug_column_mappings(\n",
483
  " data_frames, slug_column_mappings, exclude_columns, filename, cur_df\n",
 
502
  },
503
  {
504
  "cell_type": "code",
505
+ "execution_count": 52,
506
  "metadata": {},
507
  "outputs": [
508
  {
 
878
  "[255024 rows x 18 columns]"
879
  ]
880
  },
881
+ "execution_count": 52,
882
  "metadata": {},
883
  "output_type": "execute_result"
884
  }
 
901
  },
902
  {
903
  "cell_type": "code",
904
+ "execution_count": 53,
905
+ "metadata": {},
906
+ "outputs": [
907
+ {
908
+ "data": {
909
+ "text/html": [
910
+ "<div>\n",
911
+ "<style scoped>\n",
912
+ " .dataframe tbody tr th:only-of-type {\n",
913
+ " vertical-align: middle;\n",
914
+ " }\n",
915
+ "\n",
916
+ " .dataframe tbody tr th {\n",
917
+ " vertical-align: top;\n",
918
+ " }\n",
919
+ "\n",
920
+ " .dataframe thead th {\n",
921
+ " text-align: right;\n",
922
+ " }\n",
923
+ "</style>\n",
924
+ "<table border=\"1\" class=\"dataframe\">\n",
925
+ " <thead>\n",
926
+ " <tr style=\"text-align: right;\">\n",
927
+ " <th></th>\n",
928
+ " <th>Region ID</th>\n",
929
+ " <th>Size Rank</th>\n",
930
+ " <th>Region</th>\n",
931
+ " <th>Region Type</th>\n",
932
+ " <th>State</th>\n",
933
+ " <th>Home Type</th>\n",
934
+ " <th>Date</th>\n",
935
+ " <th>Median Sale to List Ratio</th>\n",
936
+ " <th>Median Sale Price</th>\n",
937
+ " <th>Median Sale Price (Smoothed) (Seasonally Adjusted)</th>\n",
938
+ " <th>Median Sale Price (Smoothed)</th>\n",
939
+ " <th>% Sold Below List (Smoothed)</th>\n",
940
+ " <th>Median Sale to List Ratio (Smoothed)</th>\n",
941
+ " <th>% Sold Above List</th>\n",
942
+ " <th>Mean Sale to List Ratio (Smoothed)</th>\n",
943
+ " <th>Mean Sale to List Ratio</th>\n",
944
+ " <th>% Sold Below List</th>\n",
945
+ " <th>% Sold Above List (Smoothed)</th>\n",
946
+ " </tr>\n",
947
+ " </thead>\n",
948
+ " <tbody>\n",
949
+ " <tr>\n",
950
+ " <th>0</th>\n",
951
+ " <td>102001</td>\n",
952
+ " <td>0</td>\n",
953
+ " <td>United States</td>\n",
954
+ " <td>country</td>\n",
955
+ " <td>NaN</td>\n",
956
+ " <td>SFR</td>\n",
957
+ " <td>2008-02-02</td>\n",
958
+ " <td>NaN</td>\n",
959
+ " <td>172000.0</td>\n",
960
+ " <td>NaN</td>\n",
961
+ " <td>NaN</td>\n",
962
+ " <td>NaN</td>\n",
963
+ " <td>NaN</td>\n",
964
+ " <td>NaN</td>\n",
965
+ " <td>NaN</td>\n",
966
+ " <td>NaN</td>\n",
967
+ " <td>NaN</td>\n",
968
+ " <td>NaN</td>\n",
969
+ " </tr>\n",
970
+ " <tr>\n",
971
+ " <th>1</th>\n",
972
+ " <td>102001</td>\n",
973
+ " <td>0</td>\n",
974
+ " <td>United States</td>\n",
975
+ " <td>country</td>\n",
976
+ " <td>NaN</td>\n",
977
+ " <td>SFR</td>\n",
978
+ " <td>2008-02-09</td>\n",
979
+ " <td>NaN</td>\n",
980
+ " <td>165400.0</td>\n",
981
+ " <td>NaN</td>\n",
982
+ " <td>NaN</td>\n",
983
+ " <td>NaN</td>\n",
984
+ " <td>NaN</td>\n",
985
+ " <td>NaN</td>\n",
986
+ " <td>NaN</td>\n",
987
+ " <td>NaN</td>\n",
988
+ " <td>NaN</td>\n",
989
+ " <td>NaN</td>\n",
990
+ " </tr>\n",
991
+ " <tr>\n",
992
+ " <th>2</th>\n",
993
+ " <td>102001</td>\n",
994
+ " <td>0</td>\n",
995
+ " <td>United States</td>\n",
996
+ " <td>country</td>\n",
997
+ " <td>NaN</td>\n",
998
+ " <td>SFR</td>\n",
999
+ " <td>2008-02-16</td>\n",
1000
+ " <td>NaN</td>\n",
1001
+ " <td>168000.0</td>\n",
1002
+ " <td>NaN</td>\n",
1003
+ " <td>NaN</td>\n",
1004
+ " <td>NaN</td>\n",
1005
+ " <td>NaN</td>\n",
1006
+ " <td>NaN</td>\n",
1007
+ " <td>NaN</td>\n",
1008
+ " <td>NaN</td>\n",
1009
+ " <td>NaN</td>\n",
1010
+ " <td>NaN</td>\n",
1011
+ " </tr>\n",
1012
+ " <tr>\n",
1013
+ " <th>3</th>\n",
1014
+ " <td>102001</td>\n",
1015
+ " <td>0</td>\n",
1016
+ " <td>United States</td>\n",
1017
+ " <td>country</td>\n",
1018
+ " <td>NaN</td>\n",
1019
+ " <td>SFR</td>\n",
1020
+ " <td>2008-02-23</td>\n",
1021
+ " <td>NaN</td>\n",
1022
+ " <td>167600.0</td>\n",
1023
+ " <td>NaN</td>\n",
1024
+ " <td>167600.0</td>\n",
1025
+ " <td>NaN</td>\n",
1026
+ " <td>NaN</td>\n",
1027
+ " <td>NaN</td>\n",
1028
+ " <td>NaN</td>\n",
1029
+ " <td>NaN</td>\n",
1030
+ " <td>NaN</td>\n",
1031
+ " <td>NaN</td>\n",
1032
+ " </tr>\n",
1033
+ " <tr>\n",
1034
+ " <th>4</th>\n",
1035
+ " <td>102001</td>\n",
1036
+ " <td>0</td>\n",
1037
+ " <td>United States</td>\n",
1038
+ " <td>country</td>\n",
1039
+ " <td>NaN</td>\n",
1040
+ " <td>SFR</td>\n",
1041
+ " <td>2008-03-01</td>\n",
1042
+ " <td>NaN</td>\n",
1043
+ " <td>168100.0</td>\n",
1044
+ " <td>NaN</td>\n",
1045
+ " <td>168100.0</td>\n",
1046
+ " <td>NaN</td>\n",
1047
+ " <td>NaN</td>\n",
1048
+ " <td>NaN</td>\n",
1049
+ " <td>NaN</td>\n",
1050
+ " <td>NaN</td>\n",
1051
+ " <td>NaN</td>\n",
1052
+ " <td>NaN</td>\n",
1053
+ " </tr>\n",
1054
+ " <tr>\n",
1055
+ " <th>...</th>\n",
1056
+ " <td>...</td>\n",
1057
+ " <td>...</td>\n",
1058
+ " <td>...</td>\n",
1059
+ " <td>...</td>\n",
1060
+ " <td>...</td>\n",
1061
+ " <td>...</td>\n",
1062
+ " <td>...</td>\n",
1063
+ " <td>...</td>\n",
1064
+ " <td>...</td>\n",
1065
+ " <td>...</td>\n",
1066
+ " <td>...</td>\n",
1067
+ " <td>...</td>\n",
1068
+ " <td>...</td>\n",
1069
+ " <td>...</td>\n",
1070
+ " <td>...</td>\n",
1071
+ " <td>...</td>\n",
1072
+ " <td>...</td>\n",
1073
+ " <td>...</td>\n",
1074
+ " </tr>\n",
1075
+ " <tr>\n",
1076
+ " <th>255019</th>\n",
1077
+ " <td>845160</td>\n",
1078
+ " <td>198</td>\n",
1079
+ " <td>Prescott Valley, AZ</td>\n",
1080
+ " <td>msa</td>\n",
1081
+ " <td>AZ</td>\n",
1082
+ " <td>all homes</td>\n",
1083
+ " <td>2023-11-11</td>\n",
1084
+ " <td>0.985132</td>\n",
1085
+ " <td>515000.0</td>\n",
1086
+ " <td>480020.0</td>\n",
1087
+ " <td>480020.0</td>\n",
1088
+ " <td>0.651221</td>\n",
1089
+ " <td>0.982460</td>\n",
1090
+ " <td>0.080000</td>\n",
1091
+ " <td>0.978546</td>\n",
1092
+ " <td>0.983288</td>\n",
1093
+ " <td>0.680000</td>\n",
1094
+ " <td>0.119711</td>\n",
1095
+ " </tr>\n",
1096
+ " <tr>\n",
1097
+ " <th>255020</th>\n",
1098
+ " <td>845160</td>\n",
1099
+ " <td>198</td>\n",
1100
+ " <td>Prescott Valley, AZ</td>\n",
1101
+ " <td>msa</td>\n",
1102
+ " <td>AZ</td>\n",
1103
+ " <td>all homes</td>\n",
1104
+ " <td>2023-11-18</td>\n",
1105
+ " <td>0.972559</td>\n",
1106
+ " <td>510000.0</td>\n",
1107
+ " <td>476901.0</td>\n",
1108
+ " <td>476901.0</td>\n",
1109
+ " <td>0.659583</td>\n",
1110
+ " <td>0.980362</td>\n",
1111
+ " <td>0.142857</td>\n",
1112
+ " <td>0.972912</td>\n",
1113
+ " <td>0.958341</td>\n",
1114
+ " <td>0.625000</td>\n",
1115
+ " <td>0.120214</td>\n",
1116
+ " </tr>\n",
1117
+ " <tr>\n",
1118
+ " <th>255021</th>\n",
1119
+ " <td>845160</td>\n",
1120
+ " <td>198</td>\n",
1121
+ " <td>Prescott Valley, AZ</td>\n",
1122
+ " <td>msa</td>\n",
1123
+ " <td>AZ</td>\n",
1124
+ " <td>all homes</td>\n",
1125
+ " <td>2023-11-25</td>\n",
1126
+ " <td>0.979644</td>\n",
1127
+ " <td>484500.0</td>\n",
1128
+ " <td>496540.0</td>\n",
1129
+ " <td>496540.0</td>\n",
1130
+ " <td>0.669387</td>\n",
1131
+ " <td>0.979179</td>\n",
1132
+ " <td>0.088235</td>\n",
1133
+ " <td>0.971177</td>\n",
1134
+ " <td>0.973797</td>\n",
1135
+ " <td>0.705882</td>\n",
1136
+ " <td>0.107185</td>\n",
1137
+ " </tr>\n",
1138
+ " <tr>\n",
1139
+ " <th>255022</th>\n",
1140
+ " <td>845160</td>\n",
1141
+ " <td>198</td>\n",
1142
+ " <td>Prescott Valley, AZ</td>\n",
1143
+ " <td>msa</td>\n",
1144
+ " <td>AZ</td>\n",
1145
+ " <td>all homes</td>\n",
1146
+ " <td>2023-12-02</td>\n",
1147
+ " <td>0.978261</td>\n",
1148
+ " <td>538000.0</td>\n",
1149
+ " <td>510491.0</td>\n",
1150
+ " <td>510491.0</td>\n",
1151
+ " <td>0.678777</td>\n",
1152
+ " <td>0.978899</td>\n",
1153
+ " <td>0.126761</td>\n",
1154
+ " <td>0.970576</td>\n",
1155
+ " <td>0.966876</td>\n",
1156
+ " <td>0.704225</td>\n",
1157
+ " <td>0.109463</td>\n",
1158
+ " </tr>\n",
1159
+ " <tr>\n",
1160
+ " <th>255023</th>\n",
1161
+ " <td>845160</td>\n",
1162
+ " <td>198</td>\n",
1163
+ " <td>Prescott Valley, AZ</td>\n",
1164
+ " <td>msa</td>\n",
1165
+ " <td>AZ</td>\n",
1166
+ " <td>all homes</td>\n",
1167
+ " <td>2023-12-09</td>\n",
1168
+ " <td>0.981498</td>\n",
1169
+ " <td>485000.0</td>\n",
1170
+ " <td>503423.0</td>\n",
1171
+ " <td>503423.0</td>\n",
1172
+ " <td>0.658777</td>\n",
1173
+ " <td>0.977990</td>\n",
1174
+ " <td>0.100000</td>\n",
1175
+ " <td>0.970073</td>\n",
1176
+ " <td>0.981278</td>\n",
1177
+ " <td>0.600000</td>\n",
1178
+ " <td>0.114463</td>\n",
1179
+ " </tr>\n",
1180
+ " </tbody>\n",
1181
+ "</table>\n",
1182
+ "<p>255024 rows × 18 columns</p>\n",
1183
+ "</div>"
1184
+ ],
1185
+ "text/plain": [
1186
+ " Region ID Size Rank Region Region Type State \\\n",
1187
+ "0 102001 0 United States country NaN \n",
1188
+ "1 102001 0 United States country NaN \n",
1189
+ "2 102001 0 United States country NaN \n",
1190
+ "3 102001 0 United States country NaN \n",
1191
+ "4 102001 0 United States country NaN \n",
1192
+ "... ... ... ... ... ... \n",
1193
+ "255019 845160 198 Prescott Valley, AZ msa AZ \n",
1194
+ "255020 845160 198 Prescott Valley, AZ msa AZ \n",
1195
+ "255021 845160 198 Prescott Valley, AZ msa AZ \n",
1196
+ "255022 845160 198 Prescott Valley, AZ msa AZ \n",
1197
+ "255023 845160 198 Prescott Valley, AZ msa AZ \n",
1198
+ "\n",
1199
+ " Home Type Date Median Sale to List Ratio Median Sale Price \\\n",
1200
+ "0 SFR 2008-02-02 NaN 172000.0 \n",
1201
+ "1 SFR 2008-02-09 NaN 165400.0 \n",
1202
+ "2 SFR 2008-02-16 NaN 168000.0 \n",
1203
+ "3 SFR 2008-02-23 NaN 167600.0 \n",
1204
+ "4 SFR 2008-03-01 NaN 168100.0 \n",
1205
+ "... ... ... ... ... \n",
1206
+ "255019 all homes 2023-11-11 0.985132 515000.0 \n",
1207
+ "255020 all homes 2023-11-18 0.972559 510000.0 \n",
1208
+ "255021 all homes 2023-11-25 0.979644 484500.0 \n",
1209
+ "255022 all homes 2023-12-02 0.978261 538000.0 \n",
1210
+ "255023 all homes 2023-12-09 0.981498 485000.0 \n",
1211
+ "\n",
1212
+ " Median Sale Price (Smoothed) (Seasonally Adjusted) \\\n",
1213
+ "0 NaN \n",
1214
+ "1 NaN \n",
1215
+ "2 NaN \n",
1216
+ "3 NaN \n",
1217
+ "4 NaN \n",
1218
+ "... ... \n",
1219
+ "255019 480020.0 \n",
1220
+ "255020 476901.0 \n",
1221
+ "255021 496540.0 \n",
1222
+ "255022 510491.0 \n",
1223
+ "255023 503423.0 \n",
1224
+ "\n",
1225
+ " Median Sale Price (Smoothed) % Sold Below List (Smoothed) \\\n",
1226
+ "0 NaN NaN \n",
1227
+ "1 NaN NaN \n",
1228
+ "2 NaN NaN \n",
1229
+ "3 167600.0 NaN \n",
1230
+ "4 168100.0 NaN \n",
1231
+ "... ... ... \n",
1232
+ "255019 480020.0 0.651221 \n",
1233
+ "255020 476901.0 0.659583 \n",
1234
+ "255021 496540.0 0.669387 \n",
1235
+ "255022 510491.0 0.678777 \n",
1236
+ "255023 503423.0 0.658777 \n",
1237
+ "\n",
1238
+ " Median Sale to List Ratio (Smoothed) % Sold Above List \\\n",
1239
+ "0 NaN NaN \n",
1240
+ "1 NaN NaN \n",
1241
+ "2 NaN NaN \n",
1242
+ "3 NaN NaN \n",
1243
+ "4 NaN NaN \n",
1244
+ "... ... ... \n",
1245
+ "255019 0.982460 0.080000 \n",
1246
+ "255020 0.980362 0.142857 \n",
1247
+ "255021 0.979179 0.088235 \n",
1248
+ "255022 0.978899 0.126761 \n",
1249
+ "255023 0.977990 0.100000 \n",
1250
+ "\n",
1251
+ " Mean Sale to List Ratio (Smoothed) Mean Sale to List Ratio \\\n",
1252
+ "0 NaN NaN \n",
1253
+ "1 NaN NaN \n",
1254
+ "2 NaN NaN \n",
1255
+ "3 NaN NaN \n",
1256
+ "4 NaN NaN \n",
1257
+ "... ... ... \n",
1258
+ "255019 0.978546 0.983288 \n",
1259
+ "255020 0.972912 0.958341 \n",
1260
+ "255021 0.971177 0.973797 \n",
1261
+ "255022 0.970576 0.966876 \n",
1262
+ "255023 0.970073 0.981278 \n",
1263
+ "\n",
1264
+ " % Sold Below List % Sold Above List (Smoothed) \n",
1265
+ "0 NaN NaN \n",
1266
+ "1 NaN NaN \n",
1267
+ "2 NaN NaN \n",
1268
+ "3 NaN NaN \n",
1269
+ "4 NaN NaN \n",
1270
+ "... ... ... \n",
1271
+ "255019 0.680000 0.119711 \n",
1272
+ "255020 0.625000 0.120214 \n",
1273
+ "255021 0.705882 0.107185 \n",
1274
+ "255022 0.704225 0.109463 \n",
1275
+ "255023 0.600000 0.114463 \n",
1276
+ "\n",
1277
+ "[255024 rows x 18 columns]"
1278
+ ]
1279
+ },
1280
+ "execution_count": 53,
1281
+ "metadata": {},
1282
+ "output_type": "execute_result"
1283
+ }
1284
+ ],
1285
+ "source": [
1286
+ "final_df[\"Date\"] = pd.to_datetime(final_df[\"Date\"], format=\"%Y-%m-%d\")\n",
1287
+ "\n",
1288
+ "final_df"
1289
+ ]
1290
+ },
1291
+ {
1292
+ "cell_type": "code",
1293
+ "execution_count": 54,
1294
  "metadata": {},
1295
  "outputs": [],
1296
  "source": [
processors/sales.py ADDED
@@ -0,0 +1,113 @@
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[2]:
5
+
6
+
7
+ import pandas as pd
8
+ import os
9
+
10
+ from helpers import (
11
+ get_combined_df,
12
+ save_final_df_as_jsonl,
13
+ handle_slug_column_mappings,
14
+ set_home_type,
15
+ )
16
+
17
+
18
+ # In[3]:
19
+
20
+
21
+ DATA_DIR = "../data"
22
+ PROCESSED_DIR = "../processed/"
23
+ FACET_DIR = "sales/"
24
+ FULL_DATA_DIR_PATH = os.path.join(DATA_DIR, FACET_DIR)
25
+ FULL_PROCESSED_DIR_PATH = os.path.join(PROCESSED_DIR, FACET_DIR)
26
+
27
+
28
+ # In[4]:
29
+
30
+
31
+ exclude_columns = [
32
+ "RegionID",
33
+ "SizeRank",
34
+ "RegionName",
35
+ "RegionType",
36
+ "StateName",
37
+ "Home Type",
38
+ ]
39
+
40
+ slug_column_mappings = {
41
+ "_median_sale_to_list_": "Median Sale to List Ratio",
42
+ "_mean_sale_to_list_": "Mean Sale to List Ratio",
43
+ "_median_sale_price_": "Median Sale Price",
44
+ "_pct_sold_above_list_": "% Sold Above List",
45
+ "_pct_sold_below_list_": "% Sold Below List",
46
+ "_sales_count_now_": "Nowcast",
47
+ }
48
+
49
+ data_frames = []
50
+
51
+ for filename in os.listdir(FULL_DATA_DIR_PATH):
52
+ if filename.endswith(".csv"):
53
+ print("processing " + filename)
54
+ # ignore monthly data for now since it is redundant
55
+ if "month" in filename:
56
+ continue
57
+
58
+ cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))
59
+
60
+ cur_df = set_home_type(cur_df, filename)
61
+
62
+ data_frames = handle_slug_column_mappings(
63
+ data_frames, slug_column_mappings, exclude_columns, filename, cur_df
64
+ )
65
+
66
+
67
+ combined_df = get_combined_df(
68
+ data_frames,
69
+ [
70
+ "RegionID",
71
+ "SizeRank",
72
+ "RegionName",
73
+ "RegionType",
74
+ "StateName",
75
+ "Home Type",
76
+ "Date",
77
+ ],
78
+ )
79
+
80
+ combined_df
81
+
82
+
83
+ # In[52]:
84
+
85
+
86
+ # Adjust column names
87
+ final_df = combined_df.rename(
88
+ columns={
89
+ "RegionID": "Region ID",
90
+ "SizeRank": "Size Rank",
91
+ "RegionName": "Region",
92
+ "RegionType": "Region Type",
93
+ "StateName": "State",
94
+ }
95
+ )
96
+
97
+ final_df["Date"] = pd.to_datetime(final_df["Date"])
98
+ final_df = final_df.sort_values(by=["Region ID", "Home Type", "Date"])
99
+
100
+
101
+ # In[53]:
102
+
103
+
104
+ final_df["Date"] = pd.to_datetime(final_df["Date"], format="%Y-%m-%d")
105
+
106
+ final_df
107
+
108
+
109
+ # In[54]:
110
+
111
+
112
+ save_final_df_as_jsonl(FULL_PROCESSED_DIR_PATH, final_df)
113
+
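
Both processors finish with save_final_df_as_jsonl(FULL_PROCESSED_DIR_PATH, final_df). That helper is also outside this diff; a minimal sketch of what it presumably does, with the output filename assumed purely for illustration:

    import os
    import pandas as pd

    def save_final_df_as_jsonl(processed_dir_path: str, final_df: pd.DataFrame) -> None:
        # Write one JSON object per row, creating the target directory if needed.
        os.makedirs(processed_dir_path, exist_ok=True)
        final_df.to_json(
            os.path.join(processed_dir_path, "final.jsonl"),  # filename is an assumption
            orient="records",
            lines=True,
            date_format="iso",
        )
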