cyberosa committed
Commit · 3881446
1 parent: 2b6844e
updating weekly data

Files changed:
- active_traders.parquet (+2 -2)
- all_trades_profitability.parquet.gz (+2 -2)
- closed_markets_div.parquet (+2 -2)
- daily_info.parquet (+2 -2)
- error_by_markets.parquet (+2 -2)
- invalid_trades.parquet (+2 -2)
- retention_activity.parquet (+3 -0)
- retention_activity.parquet.gz (+2 -2)
- scripts/cloud_storage.py (+19 -7)
- scripts/pull_data.py (+16 -15)
- scripts/web3_utils.py (+11 -1)
- service_map.pkl (+2 -2)
- tools_accuracy.csv (+12 -12)
- unknown_traders.parquet (+2 -2)
- weekly_mech_calls.parquet (+2 -2)
- winning_df.parquet (+2 -2)
active_traders.parquet CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:17f8bbf1586d566663fa7547d579f8f679ac7abc9249e71bcb01e0b205bb288b
+size 78414
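These pointer diffs only swap Git LFS metadata: each tracked file is stored as a three-line pointer (spec version, sha256 oid, byte size) while the payload lives in LFS storage. Below is a minimal sketch, assuming the artifact has already been downloaded locally, for checking a file against the new pointer; the expected values are copied from the active_traders.parquet diff above, and verify_lfs_artifact is a hypothetical helper, not part of this repo.

import hashlib
import os

# Expected values taken from the new LFS pointer above.
EXPECTED_OID = "17f8bbf1586d566663fa7547d579f8f679ac7abc9249e71bcb01e0b205bb288b"
EXPECTED_SIZE = 78414

def verify_lfs_artifact(path: str, oid: str, size: int) -> bool:
    """Compare a local file's sha256 digest and byte size against its LFS pointer."""
    if os.path.getsize(path) != size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):  # stream in 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == oid

print(verify_lfs_artifact("active_traders.parquet", EXPECTED_OID, EXPECTED_SIZE))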
all_trades_profitability.parquet.gz CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:693d4199ee220e2adac3ea474f7513004cd7786d0510cf12c4ff77f1623967af
+size 10385468
closed_markets_div.parquet CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:ab8705abf8d0a22589012ab72d31e7f3a53cb5e98a4fa26ec0275db315ceb6ad
+size 79341
daily_info.parquet CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:b9772ee2493d8fdbe89ec3ba43a15cfa5856e92991c9f3e174f6c128d2a053cf
+size 2290925
error_by_markets.parquet CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:bd12409cd7b9dd33ab7dfd18e41442e3ad685f1f5ac81f1de4a2138989ad5bb6
+size 12397
invalid_trades.parquet CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:bcadecf13096eb0ad9f17560d688ac04e467fb9dbe4d97df3fb77718839a8f8e
+size 769721
retention_activity.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bf4e97cb2e122c36862a0990aed08ade1d4b1e12fdfe755a25c4fdb166bfaee0
+size 11862944
retention_activity.parquet.gz CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:7a5b907c7fe0f87db720b8d3f29ca67fe07b685bfa6299c2e42eb99bcd3bcf56
+size 7213628
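The commit ships both plain parquet files and gzip-compressed ones (.parquet.gz). pandas reads the former directly, but the .gz variants appear to be whole-file gzip archives, so they need decompressing into a buffer first. A minimal loading sketch, assuming the files sit in the working directory and pyarrow (or fastparquet) is installed:

import gzip
import io

import pandas as pd

# Plain parquet: pandas reads it directly.
retention = pd.read_parquet("retention_activity.parquet")

# Gzip-wrapped parquet: decompress into memory, then hand pandas a file-like buffer.
with gzip.open("retention_activity.parquet.gz", "rb") as fh:
    trades = pd.read_parquet(io.BytesIO(fh.read()))

print(retention.shape, trades.shape)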
scripts/cloud_storage.py CHANGED
@@ -3,13 +3,14 @@ from minio.error import S3Error
 import os
 import argparse
 
-from utils import HIST_DIR
+from utils import HIST_DIR, ROOT_DIR
 
 MINIO_ENDPOINT = "minio.autonolas.tech"
 ACCESS_KEY = os.environ.get("CLOUD_ACCESS_KEY", None)
 SECRET_KEY = os.environ.get("CLOUD_SECRET_KEY", None)
 BUCKET_NAME = "weekly-stats"
 FOLDER_NAME = "historical_data"
+MAY_FOLDER = "may2024"
 
 
 def initialize_client():
@@ -23,10 +24,15 @@ def initialize_client():
     return client
 
 
-def upload_file(client, filename: str, file_path: str) -> bool:
+def upload_file(
+    client, filename: str, file_path: str, extra_folder: str = None
+) -> bool:
     """Upload a file to the bucket"""
     try:
-        OBJECT_NAME = FOLDER_NAME + "/" + filename
+        if extra_folder is not None:
+            OBJECT_NAME = FOLDER_NAME + "/" + extra_folder + "/" + filename
+        else:
+            OBJECT_NAME = FOLDER_NAME + "/" + filename
         print(
             f"filename={filename}, object_name={OBJECT_NAME} and file_path={file_path}"
         )
@@ -40,21 +46,22 @@ def upload_file(client, filename: str, file_path: str) -> bool:
         return False
 
 
-def download_file(client, filename: str, file_path: str):
+def download_file(client, filename: str):
     """Download the file back"""
     try:
         OBJECT_NAME = FOLDER_NAME + "/" + filename
+        file_path = filename
         client.fget_object(BUCKET_NAME, OBJECT_NAME, "downloaded_" + file_path)
         print(f"File '{OBJECT_NAME}' downloaded as 'downloaded_{file_path}'.")
     except S3Error as err:
         print(f"Error downloading file: {err}")
 
 
-def load_historical_file(client, filename: str) -> bool:
+def load_historical_file(client, filename: str, extra_folder: str = None) -> bool:
     """Function to load one file into the cloud storage"""
     file_path = filename
     file_path = HIST_DIR / filename
-    return upload_file(client, filename, file_path)
+    return upload_file(client, filename, file_path, extra_folder)
 
 
 def upload_historical_file(filename: str):
@@ -90,4 +97,9 @@ if __name__ == "__main__":
 
     client = initialize_client()
     # load_historical_file(client, filename)
-    process_historical_files(client)
+    # process_historical_files(client)
+    # checking files at the cloud storage
+    files = ["data_delivers_06_05_2024.csv", "data_tools_06_05_2024.csv"]
+    for old_file in files:
+        # download_file(client=client, filename=tools_file)
+        load_historical_file(client=client, filename=old_file, extra_folder=MAY_FOLDER)
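The reworked upload_file nests objects one level deeper when extra_folder is given, producing keys like historical_data/may2024/<filename>. A minimal usage sketch with the minio client, assuming the CLOUD_ACCESS_KEY and CLOUD_SECRET_KEY environment variables are set as the script expects; the local file path here is hypothetical:

import os

from minio import Minio

# Mirrors the endpoint and credential setup in scripts/cloud_storage.py.
client = Minio(
    "minio.autonolas.tech",
    access_key=os.environ["CLOUD_ACCESS_KEY"],
    secret_key=os.environ["CLOUD_SECRET_KEY"],
)

# Equivalent of upload_file(..., extra_folder="may2024"): the object lands at
# weekly-stats/historical_data/may2024/data_tools_06_05_2024.csv
client.fput_object(
    "weekly-stats",
    "historical_data/may2024/data_tools_06_05_2024.csv",
    "historical_data/data_tools_06_05_2024.csv",  # hypothetical local path
)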
scripts/pull_data.py CHANGED
@@ -23,7 +23,7 @@ from get_mech_info import (
 )
 from update_tools_accuracy import compute_tools_accuracy
 from cleaning_old_info import clean_old_data_from_parquet_files
-from web3_utils import
+from web3_utils import get_timestamp_two_weeks_ago
 from manage_space_files import move_files
 from cloud_storage import upload_historical_file
 from tools_metrics import compute_tools_based_datasets
@@ -89,23 +89,24 @@ def save_historical_data():
 def only_new_weekly_analysis():
     """Run weekly analysis for the FPMMS project."""
     # Run markets ETL
-    logging.info("Running markets ETL")
-    mkt_etl(MARKETS_FILENAME)
-    logging.info("Markets ETL completed")
-
-    # Mech events ETL
-    logging.info("Generating the mech json files")
-    # get only new data
-    latest_timestamp = get_mech_events_since_last_run(logger)
-    if latest_timestamp == None:
-        print("Error while getting the mech events")
-        return
-    logging.info(f"Finished generating the mech json files from {latest_timestamp}")
+    # logging.info("Running markets ETL")
+    # mkt_etl(MARKETS_FILENAME)
+    # logging.info("Markets ETL completed")
+
+    # # Mech events ETL
+    # logging.info("Generating the mech json files")
+    # # get only new data
+    # latest_timestamp = get_mech_events_since_last_run(logger)
+    # if latest_timestamp == None:
+    #     print("Error while getting the mech events")
+    #     return
+    # logging.info(f"Finished generating the mech json files from {latest_timestamp}")
 
     # FpmmTrades ETL
+    trades_timestamp = get_timestamp_two_weeks_ago()
     fpmmTrades_etl(
         trades_filename="new_fpmmTrades.parquet",
-        from_timestamp=int(
+        from_timestamp=int(trades_timestamp.timestamp()),
     )
     # merge with previous file
     print("Merging with previous fpmmTrades file")
@@ -134,7 +135,7 @@ def only_new_weekly_analysis():
 
     save_historical_data()
     try:
-        clean_old_data_from_parquet_files("2024-
+        clean_old_data_from_parquet_files("2024-12-02")
     except Exception as e:
         print("Error cleaning the oldest information from parquet files")
         print(f"reason = {e}")
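The new trade window replaces the mech-event bookmark: get_timestamp_two_weeks_ago() yields a UTC-midnight pandas Timestamp, and int(ts.timestamp()) turns it into the Unix-seconds value that fpmmTrades_etl passes along to the trades query. A minimal sketch of that conversion, using the same construction as the new helper:

from datetime import datetime, timedelta

import pandas as pd

# Midnight two weeks back, localized to UTC, as in get_timestamp_two_weeks_ago().
two_weeks_ago = (datetime.now() - timedelta(weeks=2)).strftime("%Y-%m-%d")
ts = pd.Timestamp(two_weeks_ago).tz_localize("UTC")

# Unix seconds, as fed into fpmmTrades_etl(from_timestamp=...).
print(int(ts.timestamp()))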
scripts/web3_utils.py CHANGED
@@ -5,7 +5,7 @@ import time
 import requests
 from functools import partial
 from string import Template
-from datetime import datetime
+from datetime import datetime, timedelta
 from concurrent.futures import ThreadPoolExecutor
 from collections import defaultdict
 from tqdm import tqdm
@@ -67,6 +67,16 @@ def update_block_request_map(block_request_id_map: dict) -> None:
         pickle.dump(block_request_id_map, handle, protocol=pickle.HIGHEST_PROTOCOL)
 
 
+def get_timestamp_two_weeks_ago() -> int:
+    two_weeks_ago = datetime.now() - timedelta(weeks=2)
+    two_weeks_ago_date = two_weeks_ago.strftime("%Y-%m-%d")
+    print(f"Timestamp of two weeks ago = {two_weeks_ago_date}")
+    # Convert the date string to a pandas Timestamp object with UTC timezone
+    return pd.Timestamp(datetime.strptime(two_weeks_ago_date, "%Y-%m-%d")).tz_localize(
+        "UTC"
+    )
+
+
 def reduce_window(contract_instance, event, from_block, batch_size, latest_block):
     """Dynamically reduce the batch size window."""
     keep_fraction = 1 - REDUCE_FACTOR
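Note the annotation mismatch in the new helper: despite -> int, it returns a timezone-aware pandas Timestamp (the caller in pull_data.py does the int(...) conversion), and it relies on pd already being imported elsewhere in the module, as the diff suggests. A more direct equivalent construction, offered only as a sketch:

import pandas as pd

def get_timestamp_two_weeks_ago() -> pd.Timestamp:
    """UTC midnight two weeks before now, as a tz-aware pandas Timestamp."""
    return pd.Timestamp.now(tz="UTC").normalize() - pd.Timedelta(weeks=2)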
service_map.pkl CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:a00b07411592ce1e05011a604365801a1558771868bdb2c80772a4d1a67b6dab
+size 170535
tools_accuracy.csv CHANGED
@@ -1,13 +1,13 @@
 tool,tool_accuracy,total_requests,min,max
-claude-prediction-offline,
-claude-prediction-online,
-prediction-offline,
-prediction-offline-sme,
-prediction-online,
-prediction-online-sme,49.
-prediction-request-rag,
-prediction-request-rag-claude,
-prediction-request-reasoning,52.
-prediction-request-reasoning-claude,
-prediction-url-cot-claude,
-superforcaster,
+claude-prediction-offline,57.472294476382004,91498,2024-12-02 00:04:45,2025-02-02 02:10:00
+claude-prediction-online,57.80809753931797,58561,2024-12-02 00:04:35,2025-02-02 17:04:55
+prediction-offline,57.71920894368719,159129,2024-12-02 00:04:45,2025-02-02 01:59:20
+prediction-offline-sme,51.10462692788662,28788,2024-12-02 00:02:20,2025-02-02 01:01:20
+prediction-online,50.845346147001244,5619,2024-12-02 00:27:20,2025-02-01 17:43:20
+prediction-online-sme,49.12801484230055,5390,2024-12-02 00:10:10,2025-02-01 17:43:30
+prediction-request-rag,48.80546075085324,1172,2024-12-02 00:18:00,2025-02-01 10:59:45
+prediction-request-rag-claude,50.17513134851138,1142,2024-12-02 04:00:20,2025-02-01 16:49:45
+prediction-request-reasoning,52.38675812260929,107447,2024-12-02 00:16:10,2025-02-01 18:02:25
+prediction-request-reasoning-claude,61.13902847571189,1194,2024-12-02 00:37:15,2025-02-01 17:23:30
+prediction-url-cot-claude,53.905845835488876,1933,2024-12-02 01:06:10,2025-02-01 19:00:15
+superforcaster,53.4790811230718,9011,2024-12-02 00:37:00,2025-02-01 23:58:25
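The refreshed accuracy table now covers the 2024-12-02 to 2025-02-02 request window. A minimal sketch for inspecting it, assuming the CSV sits at the repo root:

import pandas as pd

# min/max are timestamps bounding each tool's request window.
df = pd.read_csv("tools_accuracy.csv", parse_dates=["min", "max"])

# Rank tools by accuracy.
print(df.sort_values("tool_accuracy", ascending=False)[
    ["tool", "tool_accuracy", "total_requests"]
])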
unknown_traders.parquet CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:b38e5e6401c7f724e8fd425aa5e36da471d2f649ff7624c4f7410f2924e53b4a
+size 419033
weekly_mech_calls.parquet CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:f675b1980af3d10a0e10d284d7ef93ec0496946dc4ba67adf2622ed360a92875
+size 54025
winning_df.parquet CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:cc6147e64c72f32e90cde223b232a3ea65ee9c7dde97395daeb9aa31a5ef4886
+size 11882