InNoobWeTrust committed
Commit 7065556 · Parent(s): 0fa6f3b

fix: yfinance price data column name

Files changed:
- Makefile.toml +30 -0
- df.py +23 -15
- pkgx.yml +2 -0
- requirements.txt +1 -1
Makefile.toml
ADDED
@@ -0,0 +1,30 @@
+[tasks.rust]
+script_runner = "@rust"
+script = '''
+//! ```cargo
+//! [dependencies]
+//! yahoo_finance_api = "*"
+//! tokio = { version = "*", features = ["rt-multi-thread", "macros"] }
+//! tokio-test = "*"
+//! time = "*"
+//! ```
+use yahoo_finance_api as yahoo;
+use std::time::{Duration, UNIX_EPOCH};
+use tokio_test;
+
+fn main() {
+    let provider = yahoo::YahooConnector::new().unwrap();
+    let response = tokio_test::block_on(provider.get_quote_range("BTC-USD", "1d", "2y")).unwrap();
+    let quotes = response.quotes().unwrap();
+    println!("BTC's quotes: {:?}", quotes);
+    let response = tokio_test::block_on(provider.get_quote_range("ETH-USD", "1d", "2y")).unwrap();
+    let quotes = response.quotes().unwrap();
+    println!("ETH's quotes: {:?}", quotes);
+}
+'''
+
+[tasks.coinpaprika]
+script = '''
+curl "https://api.coinpaprika.com/v1/tickers/btc-bitcoin/historical?start=$(date -I -v-1y -v+2d)&interval=1d" | jq '.[].price' | uplot line
+curl "https://api.coinpaprika.com/v1/tickers/eth-the-ticker-is-eth/historical?start=$(date -I -v-1y -v+2d)&interval=1d" | jq '.[].price' | uplot line
+'''
df.py
CHANGED
@@ -43,31 +43,36 @@ def extract_date_index(df):
 
     return df
 
+
 FETCH_HEADER = {
+    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:98.0) Gecko/20100101 Firefox/98.0",
+    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
+    "Accept-Language": "en-US,en;q=0.5",
+    "Accept-Encoding": "gzip, deflate",
+    "Connection": "keep-alive",
+    "Upgrade-Insecure-Requests": "1",
+    "Sec-Fetch-Dest": "document",
+    "Sec-Fetch-Mode": "navigate",
+    "Sec-Fetch-Site": "none",
+    "Sec-Fetch-User": "?1",
+    "Cache-Control": "max-age=0",
+}
+
 
 def scrape(url: str):
     scraper = cloudscraper.create_scraper()
     return scraper.get(url)
 
+
 def fetch_btc_etf():
     url = "https://farside.co.uk/bitcoin-etf-flow-all-data/"
     r = scrape(url)
     print(r.status_code)
     if r.status_code != 200:
         print(r.content)
-    btc_etf_flow=pd.DataFrame.from_dict(
+    btc_etf_flow = pd.DataFrame.from_dict(
+        {"Date": ["11 Jan 2024", "12 Jan 2024"], "Total": [0, 0]}
+    )
     btc_etf_flow, btc_etf_flow_original = clean_etf_data(btc_etf_flow)
     return SimpleNamespace(
         url=url,
@@ -102,7 +107,9 @@ def fetch_eth_etf():
     print(r.status_code)
     if r.status_code != 200:
         print(r.content)
-    eth_etf_flow=pd.DataFrame.from_dict(
+    eth_etf_flow = pd.DataFrame.from_dict(
+        {"Date": ["11 Jan 2024", "12 Jan 2024"], "Total": [0, 0]}
+    )
     eth_etf_flow, eth_etf_flow_original = clean_etf_data(eth_etf_flow)
     return SimpleNamespace(
         url=url,
@@ -161,7 +168,8 @@ def fetch_etf_volumes(funds: List[str], start_time=None):
 def fetch_asset_price(ticker: str, start_time=None):
     price = yf.download(ticker, interval="1d", period="max", start=start_time)["Close"]
     price = extract_date_index(price)
-    price.rename(columns={
+    price.rename(columns={ticker: "Price"}, inplace=True)
+    print(price.head())
 
     return price
 
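
Note: the price.rename(columns={ticker: "Price"}, inplace=True) line is the column-name fix named in the commit title. With the yfinance release pinned below (0.2.54), selecting ["Close"] from a yf.download() result leaves a DataFrame whose column is named after the ticker, so it is renamed to a stable "Price" column for downstream code. A minimal standalone sketch of the same idea (hypothetical helper name; assumes network access and the pinned yfinance column behaviour):

import yfinance as yf

def close_as_price(ticker: str):
    # yf.download() keys price columns by ticker symbol; keep "Close" and rename it.
    price = yf.download(ticker, interval="1d", period="max")["Close"]
    return price.rename(columns={ticker: "Price"})

if __name__ == "__main__":
    print(close_as_price("BTC-USD").head())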
pkgx.yml
ADDED
@@ -0,0 +1,2 @@
+dependencies:
+  nushell.sh: ^0
requirements.txt
CHANGED
@@ -1,6 +1,6 @@
 streamlit
 pandas
-yfinance[nospam,repair]==0.2.
+yfinance[nospam,repair]==0.2.54
 altair
 vega
 workalendar