add testing

- chart_app.py +0 -270
- test_db_get.py +26 -0
chart_app.py
DELETED
@@ -1,270 +0,0 @@
-import streamlit as st
-import pandas as pd
-from sqlalchemy import create_engine
-from dotenv import load_dotenv
-import yfinance as yf
-import plotly.graph_objs as go
-import datetime
-from datetime import timedelta
-import os
-from pandas.tseries.offsets import BDay
-from getDailyData import get_daily
-load_dotenv()
-
-# Get the data for daily first
-data_daily, df_final_daily, final_row_daily = get_daily()
-
-engine = create_engine(
-    f"mysql+mysqldb://{os.getenv('DATABASE_USERNAME')}:" \
-    f"{os.getenv('DATABASE_PASSWORD')}@{os.getenv('DATABASE_HOST')}/" \
-    f"{os.getenv('DATABASE')}?ssl_ca=ca-certificates.crt&ssl_mode=VERIFY_IDENTITY"
-)
-
-q = '''SELECT * FROM results where AsOf > '2022-06-01'
-'''
-
-df_all_results = pd.read_sql_query(q, con=engine.connect())
-df_all_results['AsOf'] = df_all_results['AsOf'].dt.tz_localize('America/New_York')
-
-# Get historical data
-spx = yf.Ticker('^GSPC')
-prices = spx.history(interval='30m', start=df_all_results.index.min(), )
-df_all_results2 = df_all_results.merge(prices.reset_index()[['Datetime','Open','High','Low','Close']], left_on='AsOf', right_on='Datetime')
-df_all_results2['Color'] = df_all_results2['Predicted'].apply(lambda x: 'green' if x >= 0.6 else 'red' if x < 0.4 else 'yellow')
-df_all_results2['PredDir'] = df_all_results2['Predicted'].apply(lambda x: 'Up' if x >= 0.6 else 'Down' if x < 0.4 else 'Neutral')
-
-date_select = datetime.datetime.today() - BDay(5)
-
-if 'date_select' not in st.session_state:
-    st.session_state.date_select = date_select
-
-date_select = st.date_input(
-    'Select data for chart',
-    value=date_select,
-    min_value=data_daily.index[0],
-    max_value=data_daily.index[-1]
-)
-
-# Load your data
-df1 = df_all_results2.set_index('AsOf')
-df1 = df1.loc[df1.index > str(date_select)]
-
-dts = df1.groupby(df1.index.date).head(1).reset_index()['AsOf']
-daily_closes = data_daily.loc[df1.index.date, 'PrevClose'].drop_duplicates().reset_index()
-daily_closes['FirstBar'] = dts
-levels = data_daily.loc[df1.index.date, ['H1','H2','L1','L2','Open']].drop_duplicates().reset_index()
-levels['FirstBar'] = dts
-
-# Create a candlestick trace with custom colors based on the CandleColor column
-candlestick_trace = go.Candlestick(
-    x=df1.index,
-    open=df1['Open'],
-    high=df1['High'],
-    low=df1['Low'],
-    close=df1['Close'],
-    increasing_fillcolor='#3399ff',
-    decreasing_fillcolor='#ff5f5f',
-    increasing_line_color='#3399ff',  # Color for increasing candles
-    decreasing_line_color='#ff5f5f',  # Color for decreasing candles
-    name='30m'
-)
-
-df_up = df1.loc[df1['PredDir']=='Up']
-df_down = df1.loc[df1['PredDir']=='Down']
-df_neutral = df1.loc[df1['PredDir']=='Neutral']
-
-scatter_up = go.Scatter(
-    x=df_up.index,
-    y=df_up['High'] * 1.001,
-    mode='markers',
-    marker=dict(size=8),
-    marker_color=df_up['Color'],
-    marker_symbol='triangle-up',
-    name='Up'
-)
-
-scatter_down = go.Scatter(
-    x=df_down.index,
-    y=df_down['Low'] * 0.999,
-    mode='markers',
-    marker=dict(size=8),
-    marker_color=df_down['Color'],
-    marker_symbol='triangle-down',
-    name='Down'
-)
-
-scatter_neut = go.Scatter(
-    x=df_neutral.index,
-    y=df_neutral[['Open','High','Low','Close']].mean(axis=1),
-    mode='markers',
-    marker=dict(size=7),
-    marker_color=df_neutral['Color'],
-    marker_symbol='diamond-open',
-    name='Neutral'
-)
-
-# Create a layout
-layout = go.Layout(
-    title=dict(text='OHLC Chart with Predictions', xanchor='center', yanchor='top', y=0.9, x=0.5),
-
-    xaxis=dict(title='Date'),
-    yaxis=dict(title='Price'),
-    template='plotly_dark',
-    xaxis_rangeslider_visible=False,
-    width=750,
-    height=500
-)
-
-# Create a figure
-fig = go.Figure(data=[candlestick_trace, scatter_up, scatter_neut, scatter_down], layout=layout)
-
-fig.update_xaxes(
-    rangebreaks=[
-        # NOTE: Below values are bounds (not single values), i.e. hide x to y
-        dict(bounds=["sat", "mon"]),  # hide weekends, e.g. hide sat to before mon
-        dict(bounds=[16, 9.5], pattern="hour"),  # hide hours outside of 9.30am-4pm
-        # dict(values=["2019-12-25", "2020-12-24"])  # hide holidays (Christmas and New Year's, etc.)
-    ]
-)
-
-fig.update_layout(
-    shapes=[dict(
-        x0=d-timedelta(minutes=15), x1=d-timedelta(minutes=15), y0=0, y1=1, xref='x', yref='paper',
-        line_width=0.5, opacity=0.5, line_dash='dot') for d in df1.loc[df1['ModelNum']==0].index],
-    legend=dict(yanchor="top", y=1.05, xanchor="center", x=0.5, orientation='h'),
-    margin=dict(l=20, r=20, t=80, b=20)
-)
-
-
-# Define the y-positions for your horizontal lines
-pairs = [(start, level) for start, level in zip(daily_closes['FirstBar'], daily_closes['PrevClose'])]
-
-# Add horizontal lines to the figure
-for pair in pairs:
-    start = pair[0]
-    end = start + BDay(1) - timedelta(minutes=15)
-    level = pair[1]
-    fig.add_shape(
-        type="line",
-        x0=start,
-        x1=end,
-        y0=level,
-        y1=level,
-        xref='x',
-        yref='y',
-        line=dict(
-            width=0.5,
-            dash="dot",
-        ),
-    )
-
-for start in levels['FirstBar']:
-    end = start + BDay(1) - timedelta(minutes=15)
-    vals = levels.loc[levels['FirstBar']==start, ['H1','H2','L1','L2','Open']].values[0]
-    H1 = vals[0]
-    H2 = vals[1]
-    L1 = vals[2]
-    L2 = vals[3]
-    Open = vals[4]
-    # Plot H1
-    fig.add_shape(
-        type="line",
-        x0=start,
-        x1=end,
-        y0=H1,
-        y1=H1,
-        xref='x',
-        yref='y',
-        line=dict(
-            width=0.5,
-            dash="solid",
-            color="#ff5f5f"
-        ),
-    )
-    # Plot H2
-    fig.add_shape(
-        type="line",
-        x0=start,
-        x1=end,
-        y0=H2,
-        y1=H2,
-        xref='x',
-        yref='y',
-        line=dict(
-            width=1,
-            dash="solid",
-            color="#ff5f5f"
-        ),
-    )
-    # Plot L1
-    fig.add_shape(
-        type="line",
-        x0=start,
-        x1=end,
-        y0=L1,
-        y1=L1,
-        xref='x',
-        yref='y',
-        line=dict(
-            width=0.5,
-            dash="solid",
-            color="#3399ff"
-        ),
-    )
-    # Plot L2
-    fig.add_shape(
-        type="line",
-        x0=start,
-        x1=end,
-        y0=L2,
-        y1=L2,
-        xref='x',
-        yref='y',
-        line=dict(
-            width=1,
-            dash="solid",
-            color="#3399ff"
-        ),
-    )
-    # Plot Open
-    fig.add_shape(
-        type="line",
-        x0=start,
-        x1=end,
-        y0=Open,
-        y1=Open,
-        xref='x',
-        yref='y',
-        line=dict(
-            width=1,
-            dash="solid",
-            color="#cccccc"
-        ),
-    )
-
-fig.for_each_xaxis(lambda x: x.update(showgrid=False))
-fig.for_each_yaxis(lambda x: x.update(showgrid=False))
-
-# Show the figure
-st.plotly_chart(fig, use_container_width=True)
-
-# Important levels
-df_levels = pd.DataFrame(levels[['H2','H1','Open','L1','L2']].iloc[-1]).round(2)
-df_levels.columns = ['Levels']
-df_levels.astype(float).round(2)
-
-# For historical reference
-df_all_results['Symbol'] = df_all_results['Predicted'].apply(lambda x: '🟩' if x >= 0.6 else '🟥' if x < 0.4 else '🟨')
-today_df = df_all_results[['AsOf','Symbol','Predicted','CalibPredicted','Pvalue']].tail(13)[::-1]
-today_df = today_df.set_index('AsOf', drop=True)
-df_show = (today_df.style
-           .format(formatter={
-               'Predicted': '{:.1%}',
-               'CalibPredicted': '{:.1%}',
-               'Pvalue': '{:.2f}',
-           })
-)
-
-
-st.dataframe(df_levels.T, use_container_width=True)
-st.dataframe(df_show, use_container_width=True)
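Note (context, not part of the commit): chart_app.py above built its SQLAlchemy engine inline from environment variables, while the new test file below imports connection, engine, and insert_dataframe_to_sql from a dbConn module that this commit does not show. A hypothetical sketch of such a helper, assuming it simply centralizes the same connection string; the real module and the real insert_dataframe_to_sql signature may differ:

# dbConn.py -- hypothetical sketch; the actual module is not included in this commit
import os
from dotenv import load_dotenv
from sqlalchemy import create_engine

load_dotenv()

# Assumed to reuse the same MySQL connection string chart_app.py built inline
engine = create_engine(
    f"mysql+mysqldb://{os.getenv('DATABASE_USERNAME')}:"
    f"{os.getenv('DATABASE_PASSWORD')}@{os.getenv('DATABASE_HOST')}/"
    f"{os.getenv('DATABASE')}?ssl_ca=ca-certificates.crt&ssl_mode=VERIFY_IDENTITY"
)
connection = engine.connect()

def insert_dataframe_to_sql(table_name, df):
    # Hypothetical helper: append a DataFrame to a table; the real signature may differ
    df.to_sql(table_name, con=engine, if_exists='append', index=False)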
test_db_get.py
ADDED
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+
+# Function should get the data and run the whole model, return a single prediction based on the time
+from getDailyData import get_daily
+from model_intra_v3 import walk_forward_validation
+from model_day_v2 import walk_forward_validation_seq as walk_forward_validation_daily
+import pandas as pd
+import json
+from dbConn import connection, engine, insert_dataframe_to_sql
+import numpy as np
+from datetime import time, timedelta
+import datetime
+from pandas.tseries.offsets import BDay
+import holidays
+from dotenv import load_dotenv
+load_dotenv()
+
+def test_db():
+    # Get results, run calibration and pvalue
+    df_results = pd.read_sql_query(f'select * from results where ModelNum = 12', con=engine)
+    res = df_results.iloc[-1]
+    return res
+
+if __name__ == '__main__':
+    p = test_db()
+    print(p)
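Possible follow-up (illustrative only, not part of this commit): since test_db() returns the most recent results row for ModelNum = 12, the same check can be wrapped in a pytest-style assertion so a broken database connection fails loudly. The column name 'Predicted' below is assumed from chart_app.py and may not match the actual schema:

# test_db_get_pytest.py -- hypothetical pytest wrapper around the new smoke test
import pandas as pd
from test_db_get import test_db

def test_latest_result_row():
    res = test_db()                     # latest row for ModelNum = 12
    assert isinstance(res, pd.Series)   # DataFrame.iloc[-1] yields a Series
    assert not res.empty                # the results table should have at least one row
    # 'Predicted' is assumed from chart_app.py; adjust if the schema differs
    assert 0.0 <= float(res['Predicted']) <= 1.0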