#!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
import os
from helpers import (
    get_combined_df,
    save_final_df_as_jsonl,
    handle_slug_column_mappings,
)
# In[2]:
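# Raw Zillow CSVs for this facet are read from ../data/home_values/ and the
# processed output is written to ../processed/home_values/.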
DATA_DIR = "../data"
PROCESSED_DIR = "../processed/"
FACET_DIR = "home_values/"
FULL_DATA_DIR_PATH = os.path.join(DATA_DIR, FACET_DIR)
FULL_PROCESSED_DIR_PATH = os.path.join(PROCESSED_DIR, FACET_DIR)
# In[5]:
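# data_frames accumulates one processed frame per raw CSV. The slug fragments
# below map a filename to the ZHVI column its values populate (consumed by
# handle_slug_column_mappings from helpers.py).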
data_frames = []
slug_column_mappings = {
    "_tier_0.0_0.33_": "Bottom Tier ZHVI",
    "_tier_0.33_0.67_": "Mid Tier ZHVI",
    "_tier_0.67_1.0_": "Top Tier ZHVI",
    "": "ZHVI",
}
for filename in os.listdir(FULL_DATA_DIR_PATH):
    if filename.endswith(".csv"):
        print("processing " + filename)
        cur_df = pd.read_csv(os.path.join(FULL_DATA_DIR_PATH, filename))
        exclude_columns = [
            "RegionID",
            "SizeRank",
            "RegionName",
            "RegionType",
            "StateName",
            "Bedroom Count",
            "Home Type",
        ]

        # Only state-level files are processed for this facet; every other
        # region type is skipped here, which also means the region-specific
        # exclude_columns branches below never run.
        if "Zip" in filename:
            continue
        if "Neighborhood" in filename:
            continue
        if "City" in filename:
            continue
        if "Metro" in filename:
            continue
        if "County" in filename:
            continue

        if "City" in filename:
            exclude_columns = exclude_columns + ["State", "Metro", "CountyName"]
        elif "Zip" in filename:
            exclude_columns = exclude_columns + [
                "State",
                "City",
                "Metro",
                "CountyName",
            ]
        elif "County" in filename:
            exclude_columns = exclude_columns + [
                "State",
                "Metro",
                "StateCodeFIPS",
                "MunicipalCodeFIPS",
            ]
        elif "Neighborhood" in filename:
            exclude_columns = exclude_columns + [
                "State",
                "City",
                "Metro",
                "CountyName",
            ]

        # Tag rows with the bedroom-count segment encoded in the filename.
        if "_bdrmcnt_1_" in filename:
            cur_df["Bedroom Count"] = "1-Bedroom"
        elif "_bdrmcnt_2_" in filename:
            cur_df["Bedroom Count"] = "2-Bedrooms"
        elif "_bdrmcnt_3_" in filename:
            cur_df["Bedroom Count"] = "3-Bedrooms"
        elif "_bdrmcnt_4_" in filename:
            cur_df["Bedroom Count"] = "4-Bedrooms"
        elif "_bdrmcnt_5_" in filename:
            cur_df["Bedroom Count"] = "5+-Bedrooms"
        else:
            cur_df["Bedroom Count"] = "All Bedrooms"

        # Tag rows with the home type encoded in the filename.
        if "_uc_sfr_" in filename:
            cur_df["Home Type"] = "SFR"
        elif "_uc_sfrcondo_" in filename:
            cur_df["Home Type"] = "all homes (SFR/condo)"
        elif "_uc_condo_" in filename:
            cur_df["Home Type"] = "condo"

        # Normalize key columns to strings before combining.
        cur_df["StateName"] = cur_df["StateName"].astype(str)
        cur_df["RegionName"] = cur_df["RegionName"].astype(str)

        data_frames = handle_slug_column_mappings(
            data_frames, slug_column_mappings, exclude_columns, filename, cur_df
        )
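# get_combined_df (a shared helper from helpers.py) is expected to merge the
# per-file frames on the identifier columns listed below.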
combined_df = get_combined_df(
    data_frames,
    [
        "RegionID",
        "SizeRank",
        "RegionName",
        "RegionType",
        "StateName",
        "Bedroom Count",
        "Home Type",
        "Date",
    ],
)
combined_df
# In[11]:
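# Copy RegionName into the matching location column based on RegionType, so
# city, county, and state rows each populate their own field.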
final_df = combined_df

for index, row in final_df.iterrows():
    if row["RegionType"] == "city":
        final_df.at[index, "City"] = row["RegionName"]
    elif row["RegionType"] == "county":
        final_df.at[index, "County"] = row["RegionName"]
    elif row["RegionType"] == "state":
        final_df.at[index, "StateName"] = row["RegionName"]
# coalesce State and StateName columns
# final_df["State"] = final_df["State"].combine_first(final_df["StateName"])
# final_df["County"] = final_df["County"].combine_first(final_df["CountyName"])
# final_df = final_df.drop(
# columns=[
# "StateName",
# # "CountyName"
# ]
# )
final_df
# In[12]:
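# Rename columns to the human-readable names used in the final output.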
final_df = final_df.rename(
    columns={
        "RegionID": "Region ID",
        "SizeRank": "Size Rank",
        "RegionName": "Region",
        "RegionType": "Region Type",
        "StateCodeFIPS": "State Code FIPS",
        "StateName": "State",
        "MunicipalCodeFIPS": "Municipal Code FIPS",
    }
)
final_df
# In[13]:
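# Write the final dataframe as JSONL into ../processed/home_values/
# (save_final_df_as_jsonl is a shared helper from helpers.py).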
save_final_df_as_jsonl(FULL_PROCESSED_DIR_PATH, final_df)