import gradio as gr
import hopsworks
import joblib
import pandas as pd
import numpy as np
import folium
import sklearn.preprocessing as proc
import json
import time
from datetime import timedelta, datetime
from branca.element import Figure
from functions import decode_features, get_weather_data, get_weather_df, get_weather_json_quick
#import functions
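
# This Gradio app predicts daily temperatures: it downloads three trained
# regressors (mean, max and min temperature) from the Hopsworks model registry,
# fetches raw weather features for the upcoming days, rescales them, and
# returns the predictions as plain text.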
def greet(total_pred_days):
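    """Return a text block with the predicted mean/max/min temperature for the
    next `total_pred_days` days (a string from the Gradio textbox, capped at 14).
    """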
# print("hi")
project = hopsworks.login()
# print("connected")
# #api = project.get_dataset_api()
#
# # The latest available data timestamp
# start_time = 1649196000000
# # end_time = 1670972400000
#start_date = datetime.now() - timedelta(days=1)
#start_time = int(start_date.timestamp()) * 1000
#print("Time Stamp Set. ")
#print("latest_date")
    # Download the three trained models (mean, max and min daily temperature)
    # from the model registry and load them with joblib.
    mr = project.get_model_registry()
    model = mr.get_model("temp_model_new", version=1)
    model_dir = model.download()
    model1 = mr.get_model("tempmax_model_new", version=1)
    model_dir1 = model1.download()
    model2 = mr.get_model("tempmin_model_new", version=1)
    model_dir2 = model2.download()
    model = joblib.load(model_dir + "/model_temp_new.pkl")
    model1 = joblib.load(model_dir1 + "/model_tempmax_new.pkl")
    model2 = joblib.load(model_dir2 + "/model_tempmin_new.pkl")
    print("Models loaded.")
    # Validate the user input: it must be a number of days between 0 and 14.
    str1 = ""
    if total_pred_days == "":
        return "Empty input"
    try:
        count = int(total_pred_days)
    except ValueError:
        return "Invalid input."
    if count > 14:
        str1 += "Warning: 14 days at most.\n"
        count = 14
    if count < 0:
        return "Invalid input."
    # Get weather data. The feature view is fetched for reference, but scaling
    # is currently done locally with scikit-learn below.
    fs = project.get_feature_store()
    feature_view = fs.get_feature_view(
        name='weathernew_fv',
        version=1,
    )

    global X
    X = pd.DataFrame()
    for i in range(count + 1):
        # Fetch the raw weather record for each day (today plus `count` days ahead).
        next_day = (datetime.today() + timedelta(days=i)).strftime('%Y-%m-%d')
        weather_json = get_weather_json_quick(next_day)  # local name; keeps the json module usable
        temp = get_weather_data(weather_json)
        # pd.concat replaces the removed DataFrame.append(); wrap dict-like records in a DataFrame.
        temp_row = temp if isinstance(temp, pd.DataFrame) else pd.DataFrame([temp])
        X = pd.concat([X, temp_row], ignore_index=True)
    # Drop fields the models were not trained on and align the column names
    # with the training schema.
    X = X.drop(columns=[
        'preciptype', 'severerisk', 'stations', 'sunrise', 'sunset',
        'moonphase', 'description', 'icon', 'conditions',
        'datetime', 'temp', 'tempmax', 'tempmin',
        'sunriseEpoch', 'sunsetEpoch', 'source', 'datetimeEpoch',
    ]).fillna(0)
    X = X.rename(columns={'pressure': 'sealevelpressure'})
    # Standard-scale the numeric features. Note that the scaler is fitted on
    # this prediction batch rather than reusing training-time statistics.
    cat_std_cols = ['feelslikemax', 'feelslikemin', 'feelslike', 'dew', 'humidity',
                    'precip', 'precipprob', 'precipcover', 'snow', 'snowdepth',
                    'windgust', 'windspeed', 'winddir', 'sealevelpressure',
                    'cloudcover', 'visibility', 'solarradiation', 'solarenergy',
                    'uvindex']
    scaler_std = proc.StandardScaler()
    # Re-insert placeholder columns the models expect at fixed positions.
    X.insert(19, "conditions", 0)
    X[cat_std_cols] = scaler_std.fit_transform(X[cat_std_cols])
    X.insert(0, "name", 0)
    # Predict with all three models and format one line per forecast day.
    preds = model.predict(X)
    preds1 = model1.predict(X)
    preds2 = model2.predict(X)
    for x in range(1, count):
        day = (datetime.now() + timedelta(days=x)).strftime('%Y-%m-%d')
        str1 += (day
                 + " predicted temperature: " + str(int(preds[len(preds) - count + x]))
                 + " predicted max temperature: " + str(int(preds1[len(preds1) - count + x]))
                 + " predicted min temperature: " + str(int(preds2[len(preds2) - count + x]))
                 + "\n")
    return str1

demo = gr.Interface(fn=greet, inputs="text", outputs="text")

if __name__ == "__main__":
    demo.launch()