"""Enrich mech request data with transaction hashes and timestamps from Gnosis Chain."""
import os
import pickle
import time
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime, timezone
from functools import partial
from typing import Callable

import pandas as pd
import pytz
import requests
from tqdm import tqdm
from web3 import Web3

from utils import ROOT_DIR, TMP_DIR, measure_execution_time

GNOSIS_API_INTERVAL = 0.2  # seconds to sleep between Gnosisscan API calls
GNOSIS_URL = "https://api.gnosisscan.io/api"
GNOSIS_API_KEY = os.environ.get("GNOSIS_API_KEY")

w3 = Web3(Web3.HTTPProvider("https://rpc.gnosischain.com"))
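
# A minimal connectivity check (illustrative sketch; `is_connected` is the
# web3.py v6 spelling, older releases use `isConnected`):
#     assert w3.is_connected(), "Gnosis RPC endpoint is unreachable"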


def parallelize_timestamp_computation(df: pd.DataFrame, function: Callable) -> list:
    """Parallelize the timestamp conversion across the DataFrame's tx hashes."""
    tx_hashes = df["tx_hash"].tolist()
    with ThreadPoolExecutor(max_workers=10) as executor:
        results = list(tqdm(executor.map(function, tx_hashes), total=len(tx_hashes)))
    return results
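
# Example usage (sketch; assumes a DataFrame with a populated "tx_hash" column):
#     fetch = partial(get_transaction_timestamp, web3=w3)
#     timestamps = parallelize_timestamp_computation(tools_df, fetch)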


def transform_timestamp_to_datetime(timestamp):
    """Convert a Unix timestamp to a timezone-aware UTC datetime."""
    return datetime.fromtimestamp(timestamp, timezone.utc)
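
# For instance, transform_timestamp_to_datetime(0) returns
# datetime(1970, 1, 1, tzinfo=timezone.utc).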


def get_tx_hash(trader_address, request_block):
    """Get the hash of the trader's transaction mined at the given block number."""
    params = {
        "module": "account",
        "action": "txlist",
        "address": trader_address,
        "page": 1,
        "offset": 100,
        "startblock": request_block,
        "endblock": request_block,
        "sort": "asc",
        "apikey": GNOSIS_API_KEY,
    }

    try:
        response = requests.get(GNOSIS_URL, params=params)
        tx_list = response.json()["result"]
        time.sleep(GNOSIS_API_INTERVAL)  # stay under the Gnosisscan rate limit
        if len(tx_list) > 1:
            raise ValueError("More than one transaction found")
        return tx_list[0]["hash"]
    except Exception:
        return None
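
# Example usage (sketch; the address and block number are hypothetical):
#     tx_hash = get_tx_hash(
#         trader_address="0x0000000000000000000000000000000000000000",
#         request_block=30000000,
#     )  # hash string, or None when no single matching transaction is found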


def add_tx_hash_info(filename: str = "tools.parquet"):
    """Add the transaction hash of each mech request to the saved tools parquet file."""
    tools = pd.read_parquet(ROOT_DIR / filename)
    tools["tx_hash"] = None
    total_errors = 0
    for i, mech_request in tqdm(
        tools.iterrows(), total=len(tools), desc="Adding tx hash"
    ):
        try:
            trader_address = mech_request["trader_address"]
            block_number = mech_request["request_block"]
            tools.at[i, "tx_hash"] = get_tx_hash(
                trader_address=trader_address, request_block=block_number
            )
        except Exception:
            print(f"Error with mech request {mech_request}")
            total_errors += 1
            continue

    print(f"Total number of errors = {total_errors}")
    tools.to_parquet(ROOT_DIR / filename)
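
# Typical invocation (sketch; rewrites the parquet file in place):
#     add_tx_hash_info("tools.parquet")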


def get_transaction_timestamp(tx_hash: str, web3: Web3):
    """Look up the transaction's block via the RPC node and return its timestamp as a UTC string."""
    try:
        tx = web3.eth.get_transaction(tx_hash)
        block = web3.eth.get_block(tx["blockNumber"])
        timestamp = block["timestamp"]
        dt = datetime.fromtimestamp(timestamp, tz=pytz.UTC)
        return dt.strftime("%Y-%m-%d %H:%M:%S")
    except Exception:
        print(f"Error getting the timestamp from {tx_hash}")
        return None
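
# Example usage (sketch; the tx hash is hypothetical):
#     request_time = get_transaction_timestamp("0xabc123", web3=w3)
#     # e.g. "2024-01-15 12:34:56", or None if the lookup fails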


@measure_execution_time
def compute_request_time(tools_df: pd.DataFrame) -> pd.DataFrame:
    """Compute the request timestamp of each row from its tx hash, using a local cache."""
    try:
        with open(TMP_DIR / "gnosis_info.pkl", "rb") as f:
            gnosis_info = pickle.load(f)
    except Exception:
        print("File not found or not created. Creating a new one")
        gnosis_info = {}

    # Reuse any timestamps already cached for these tx hashes.
    tools_df["request_time"] = tools_df["tx_hash"].map(gnosis_info)

    missing_time_indices = tools_df[tools_df["request_time"].isna()].index
    print(f"length of missing_time_indices = {len(missing_time_indices)}")

    # Fetch the missing timestamps from the RPC node in parallel.
    partial_mech_request_timestamp = partial(get_transaction_timestamp, web3=w3)
    missing_timestamps = parallelize_timestamp_computation(
        tools_df.loc[missing_time_indices], partial_mech_request_timestamp
    )

    for i, timestamp in zip(missing_time_indices, missing_timestamps):
        tools_df.at[i, "request_time"] = timestamp

    tools_df["request_month_year"] = pd.to_datetime(
        tools_df["request_time"], utc=True
    ).dt.strftime("%Y-%m")
    tools_df["request_month_year_week"] = (
        pd.to_datetime(tools_df["request_time"])
        .dt.to_period("W")
        .dt.start_time.dt.strftime("%b-%d-%Y")
    )

    # Persist the newly fetched timestamps back into the cache.
    new_timestamps = (
        tools_df[["tx_hash", "request_time"]]
        .dropna()
        .set_index("tx_hash")
        .to_dict()["request_time"]
    )
    gnosis_info.update(new_timestamps)

    with open(TMP_DIR / "gnosis_info.pkl", "wb") as f:
        pickle.dump(gnosis_info, f)
    return tools_df
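
# The gnosis_info cache is a plain dict mapping tx hash -> formatted timestamp,
# e.g. a (hypothetical) entry: {"0xabc123": "2024-01-15 12:34:56"}.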


def get_account_details(address):
    """Fetch the internal transactions of the given address from Gnosisscan."""
    params = {
        "module": "account",
        "action": "txlistinternal",
        "address": address,
        "apikey": GNOSIS_API_KEY,
    }

    try:
        response = requests.get(GNOSIS_URL, params=params)
        return response.json()
    except Exception as e:
        return {"error": str(e)}
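
# Example usage (sketch; the address is hypothetical):
#     details = get_account_details("0x0000000000000000000000000000000000000000")
#     print(details.get("result", details))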


if __name__ == "__main__":
    new_tools = pd.read_parquet(ROOT_DIR / "new_tools.parquet")
    new_tools = compute_request_time(new_tools)
    new_tools.to_parquet(ROOT_DIR / "new_tools.parquet")