Bhanu Prasanna committed
Commit: e41e3ea
Parent(s): 4517f3b

Update

Files changed:
- Notebooks/sample.ipynb +90 -0
- main.py +14 -3
Notebooks/sample.ipynb CHANGED
@@ -160,6 +160,96 @@
     "collapsed": false
    },
    "id": "162b263160a53be2"
+  },
+  {
+   "cell_type": "code",
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "[*********************100%%**********************] 1 of 1 completed\n",
+      "[*********************100%%**********************] 1 of 1 completed\n",
+      "[*********************100%%**********************] 1 of 1 completed\n",
+      "\n",
+      "1 Failed download:\n",
+      "['BHUSANSTL.NS']: Exception('%ticker%: No price data found, symbol may be delisted (1d 2023-01-01 -> 2024-01-01)')\n",
+      "[*********************100%%**********************] 1 of 1 completed"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "                           AAPL       GOOG        MSFT\n",
+      "Date                                                  \n",
+      "2023-01-03 00:00:00  124.216301  89.699997  237.036026\n",
+      "2023-01-04 00:00:00  125.497498  88.709999  226.667297\n",
+      "2023-01-05 00:00:00  124.166634  86.769997  219.949371\n",
+      "2023-01-06 00:00:00  128.735229  88.160004  222.541565\n",
+      "2023-01-09 00:00:00  129.261612  88.800003  224.708313\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "import yfinance as yf\n",
+    "import pandas as pd\n",
+    "\n",
+    "def fetch_adj_close(ticker):\n",
+    "    try:\n",
+    "        data = yf.download(ticker, start=\"2023-01-01\", end=\"2024-01-01\")\n",
+    "        adj_close = data['Adj Close'] # Extracting adjusted close prices\n",
+    "        adj_close = pd.DataFrame(adj_close) # Convert Series to DataFrame\n",
+    "        adj_close.columns = [ticker] # Rename column to ticker symbol\n",
+    "        return adj_close\n",
+    "    except Exception as e:\n",
+    "        print(f\"Error fetching data for {ticker}: {e}\")\n",
+    "        return None\n",
+    "\n",
+    "# List of tickers you want to fetch data for\n",
+    "tickers = [\"AAPL\", \"GOOG\", \"BHUSANSTL.NS\", \"MSFT\"]\n",
+    "\n",
+    "# Create an empty DataFrame to store the adjusted close prices\n",
+    "adj_close_df = pd.DataFrame()\n",
+    "\n",
+    "for ticker in tickers:\n",
+    "    data = fetch_adj_close(ticker)\n",
+    "    if data is not None:\n",
+    "        adj_close_df = pd.concat([adj_close_df, data], axis=1) # Concatenate the data to the DataFrame\n",
+    "\n",
+    "# Set the date as the index\n",
+    "adj_close_df.index.name = 'Date'\n",
+    "\n",
+    "# Drop columns with any NaN values (i.e., for invalid tickers)\n",
+    "adj_close_df.dropna(axis=1, how='any', inplace=True)\n",
+    "\n",
+    "print(adj_close_df.head())\n"
+   ],
+   "metadata": {
+    "collapsed": false,
+    "ExecuteTime": {
+     "end_time": "2024-03-08T23:14:00.320054Z",
+     "start_time": "2024-03-08T23:14:00.259300Z"
+    }
+   },
+   "id": "fdb2a5f6391b0ca2",
+   "execution_count": 5
+  },
+  {
+   "cell_type": "code",
+   "outputs": [],
+   "source": [],
+   "metadata": {
+    "collapsed": false
+   },
+   "id": "b34afac23ee07c5e"
   }
  ],
  "metadata": {
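The new cell downloads each ticker separately and concatenates the columns; yfinance can also fetch several symbols in one call, which is the pattern main.py relies on. A minimal sketch of that batched alternative, assuming a yfinance release where a multi-ticker yf.download exposes an "Adj Close" column group and returns all-NaN columns for symbols that fail to download:

import yfinance as yf

tickers = ["AAPL", "GOOG", "BHUSANSTL.NS", "MSFT"]

# One request for all symbols; under the stated assumption, tickers that fail
# to download come back as all-NaN columns rather than raising.
adj_close_df = yf.download(tickers, start="2023-01-01", end="2024-01-01")["Adj Close"]

# Drop tickers with no data at all, then any remaining rows with gaps.
adj_close_df = adj_close_df.dropna(axis=1, how="all").dropna()
print(adj_close_df.head())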
main.py CHANGED
@@ -38,10 +38,14 @@ company_name = list_df["Name"].to_list()
 company_symbol = (list_df["Ticker"] + ".NS").to_list()
 
 company_dict = dict()
+company_symbol_dict = dict()
 
 for CSymbol, CName in zip(company_symbol, company_name):
     company_dict[CName] = CSymbol
 
+for CSymbol, CName in zip(company_symbol, company_name):
+    company_symbol_dict[CSymbol] = CName
+
 st.markdown(
     """
     <style>
@@ -63,7 +67,14 @@ num_tick = len(com_sel)
 if num_tick > 1:
 
     com_data = yf.download(com_sel, start="1900-01-01", end="2024-03-08")["Adj Close"]
+    for i in com_data.columns:
+        com_data.dropna(axis=1, how='all', inplace=True)
     com_data.dropna(inplace=True)
+    num_tick = len(com_data.columns)
+
+    com_sel_name_temp = []
+    for i in com_data.columns:
+        com_sel_name_temp.append(company_symbol_dict[i])
 
     com_sel = com_data.columns.to_list()
     com_sel_name.sort()
@@ -91,7 +102,7 @@ if num_tick > 1:
     # Put the weights into a data frame to see them better.
     weights_df = pd.DataFrame(
         data={
-            "company_name":
+            "company_name": com_sel_name_temp,
             "random_weights": rand_weig,
             "rebalance_weights": rebal_weig,
         }
@@ -187,7 +198,7 @@ if num_tick > 1:
 
     max_sharpe_weights_df = pd.DataFrame(
         data={
-            "company_name":
+            "company_name": com_sel_name_temp,
             "random_weights": max_sharpe_ratio["Portfolio Weights"],
         }
     )
@@ -201,7 +212,7 @@ if num_tick > 1:
 
     min_volatility_weights_df = pd.DataFrame(
         data={
-            "company_name":
+            "company_name": com_sel_name_temp,
             "random_weights": min_volatility["Portfolio Weights"],
         }
     )
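The main.py change keeps the human-readable labels aligned with whichever ticker columns survive the NaN filtering by reversing the name-to-symbol mapping. A minimal, self-contained sketch of that intent, using hypothetical symbols and names rather than the app's actual company list (a list comprehension stands in for the explicit loop in the diff):

import pandas as pd

# Hypothetical stand-ins for company_symbol_dict and the downloaded price data.
company_symbol_dict = {"AAA.NS": "Company A", "BBB.NS": "Company B", "CCC.NS": "Company C"}
com_data = pd.DataFrame(
    {"AAA.NS": [101.0, 102.0], "BBB.NS": [55.0, 56.0], "CCC.NS": [None, None]}
)

# Symbols whose download failed are entirely NaN and drop out here.
com_data.dropna(axis=1, how="all", inplace=True)

# Rebuild the display names from the surviving columns so the weights
# DataFrames line up column-for-column with com_data.
com_sel_name_temp = [company_symbol_dict[s] for s in com_data.columns]
print(com_sel_name_temp)  # ['Company A', 'Company B']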