import time
from enum import Enum
from typing import Any, Optional

import pandas as pd
from tqdm import tqdm

from web3_utils import query_conditional_tokens_gc_subgraph
from get_mech_info import (
    DATETIME_60_DAYS_AGO,
    update_tools_parquet,
    update_all_trades_parquet,
)
from utils import (
    wei_to_unit,
    convert_hex_to_int,
    JSON_DATA_DIR,
    ROOT_DIR,
    DEFAULT_MECH_FEE,
    TMP_DIR,
    measure_execution_time,
    transform_to_datetime,
)
from staking import label_trades_by_staking
from nr_mech_calls import (
    create_unknown_traders_df,
    compute_mech_calls_based_on_timestamps,
)
|
|
|
DUST_THRESHOLD = 10000000000000  # in wei, i.e. 0.00001 in 18-decimal units
INVALID_ANSWER = -1
DEFAULT_60_DAYS_AGO_TIMESTAMP = DATETIME_60_DAYS_AGO.timestamp()
WXDAI_CONTRACT_ADDRESS = "0xe91D153E0b41518A2Ce8Dd3D7944Fa863463a97d"
|
|
|
|
|
class MarketState(Enum):
    """Market state"""

    OPEN = 1
    PENDING = 2
    FINALIZING = 3
    ARBITRATING = 4
    CLOSED = 5

    def __str__(self) -> str:
        """Return the market state as a capitalized name."""
        return self.name.capitalize()
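# Example: str() yields a human-readable label for reports,
# e.g. str(MarketState.FINALIZING) -> "Finalizing".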
|
|
|
|
|
class MarketAttribute(Enum):
    """Market attribute"""

    NUM_TRADES = "Num_trades"
    WINNER_TRADES = "Winner_trades"
    NUM_REDEEMED = "Num_redeemed"
    INVESTMENT = "Investment"
    FEES = "Fees"
    MECH_CALLS = "Mech_calls"
    MECH_FEES = "Mech_fees"
    EARNINGS = "Earnings"
    NET_EARNINGS = "Net_earnings"
    REDEMPTIONS = "Redemptions"
    ROI = "ROI"

    def __str__(self) -> str:
        """Return the attribute's display value."""
        return self.value

    def __repr__(self) -> str:
        """Return the attribute's name."""
        return self.name

    @staticmethod
    def argparse(s: str) -> "MarketAttribute":
        """Convert a string to a MarketAttribute (case-insensitive)."""
        try:
            return MarketAttribute[s.upper()]
        except KeyError as e:
            raise ValueError(f"Invalid MarketAttribute: {s}") from e
|
|
|
|
|
ALL_TRADES_STATS_DF_COLS = [
    "trader_address",
    "market_creator",
    "trade_id",
    "creation_timestamp",
    "title",
    "market_status",
    "collateral_amount",
    "outcome_index",
    "trade_fee_amount",
    "outcomes_tokens_traded",
    "current_answer",
    "is_invalid",
    "winning_trade",
    "earnings",
    "redeemed",
    "redeemed_amount",
    "num_mech_calls",
    "mech_fee_amount",
    "net_earnings",
    "roi",
]
|
|
|
|
|
def _is_redeemed(user_json: dict[str, Any], fpmmTrade: dict[str, Any]) -> bool:
    """Return whether the user has redeemed the position."""
    user_positions = user_json["data"]["user"]["userPositions"]
    condition_id = fpmmTrade["fpmm.condition.id"]
    for position in user_positions:
        position_condition_ids = position["position"]["conditionIds"]
        balance = int(position["balance"])

        if condition_id in position_condition_ids:
            # A zero balance on the matching position means the outcome
            # tokens were redeemed; a non-zero balance means they were not.
            return balance == 0

    return False
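# A sketch of the subgraph payload shape this helper expects, taken from the
# accessors above (values are hypothetical):
#   user_json = {
#       "data": {
#           "user": {
#               "userPositions": [
#                   {"balance": "0", "position": {"conditionIds": ["0xabc"]}},
#               ]
#           }
#       }
#   }
#   _is_redeemed(user_json, {"fpmm.condition.id": "0xabc"})  # -> True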
|
|
|
|
|
def prepare_profitability_data(
    tools_filename: str,
    trades_filename: str,
    tmp_dir: bool = False,
) -> Optional[pd.DataFrame]:
    """Prepare data for the profitability analysis."""
    try:
        if tmp_dir:
            tools = pd.read_parquet(TMP_DIR / tools_filename)
        else:
            tools = pd.read_parquet(ROOT_DIR / tools_filename)

        assert "trader_address" in tools.columns, "trader_address column not found"

        tools["trader_address"] = tools["trader_address"].str.lower().str.strip()
        tools.drop_duplicates(
            subset=["request_id", "request_block"], keep="last", inplace=True
        )
        tools.to_parquet(ROOT_DIR / tools_filename)
        print(f"{tools_filename} loaded")
    except FileNotFoundError:
        print(f"{tools_filename} not found.")
        return None

    print("Reading the new trades file")
    try:
        if tmp_dir:
            fpmmTrades = pd.read_parquet(TMP_DIR / trades_filename)
        else:
            fpmmTrades = pd.read_parquet(ROOT_DIR / trades_filename)
    except FileNotFoundError:
        print(f"Error reading the {trades_filename} file.")
        return None

    assert "trader_address" in fpmmTrades.columns, "trader_address column not found"

    fpmmTrades["trader_address"] = fpmmTrades["trader_address"].str.lower().str.strip()
    return fpmmTrades
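# Example (hypothetical filenames): load and normalise both parquet files from
# TMP_DIR before running the analysis steps below:
#   fpmmTrades = prepare_profitability_data(
#       "tools.parquet", "fpmmTrades.parquet", tmp_dir=True
#   )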
|
|
|
|
|
def determine_market_status(trade, current_answer) -> MarketState:
    """Determine the market status of a trade."""
    if pd.isna(current_answer) and time.time() >= int(trade["fpmm.openingTimestamp"]):
        return MarketState.PENDING
    if pd.isna(current_answer):
        return MarketState.OPEN
    if trade["fpmm.isPendingArbitration"]:
        return MarketState.ARBITRATING
    if time.time() < int(trade["fpmm.answerFinalizedTimestamp"]):
        return MarketState.FINALIZING
    return MarketState.CLOSED
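# A minimal sketch (hypothetical field values): a market whose opening time is
# already in the past but which has no answer yet is PENDING:
#   trade = {
#       "fpmm.openingTimestamp": "1700000000",
#       "fpmm.isPendingArbitration": False,
#       "fpmm.answerFinalizedTimestamp": float("nan"),
#   }
#   determine_market_status(trade, current_answer=None)  # -> MarketState.PENDING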
|
|
|
|
|
def analyse_trader(
    trader_address: str,
    fpmmTrades: pd.DataFrame,
    trader_estimated_mech_calls: pd.DataFrame,
    daily_info: bool = False,
) -> pd.DataFrame:
    """Analyse a single trader's trades."""
    fpmmTrades["creation_timestamp"] = pd.to_datetime(fpmmTrades["creationTimestamp"])
    fpmmTrades["creation_date"] = fpmmTrades["creation_timestamp"].dt.date

    trades = fpmmTrades[fpmmTrades["trader_address"] == trader_address]

    trades_df = pd.DataFrame(columns=ALL_TRADES_STATS_DF_COLS)
    if trades.empty:
        return trades_df

    try:
        user_json = query_conditional_tokens_gc_subgraph(trader_address)
    except Exception as e:
        print(f"Error fetching user data: {e}")
        return trades_df

    trades_answer_nan = 0
    trades_no_closed_market = 0
    for i, trade in tqdm(trades.iterrows(), total=len(trades), desc="Analysing trades"):
        try:
            market_answer = trade["fpmm.currentAnswer"]
            trading_day = trade["creation_date"]
            trade_id = trade["id"]
            if not daily_info and (pd.isna(market_answer) or not market_answer):
                # Without a current answer the trade cannot be settled yet.
                trades_answer_nan += 1
                continue

            collateral_amount = wei_to_unit(float(trade["collateralAmount"]))
            fee_amount = wei_to_unit(float(trade["feeAmount"]))
            outcome_tokens_traded = wei_to_unit(float(trade["outcomeTokensTraded"]))
            earnings, winner_trade = (0, False)
            redemption = _is_redeemed(user_json, trade)
            current_answer = (
                None if pd.isna(market_answer) or not market_answer else market_answer
            )
            market_creator = trade["market_creator"]

            market_status = determine_market_status(trade, current_answer)

            if not daily_info and market_status != MarketState.CLOSED:
                # Only closed markets have final earnings; skip the rest
                # unless daily information was requested.
                trades_no_closed_market += 1
                continue
            if current_answer is not None:
                current_answer = convert_hex_to_int(current_answer)

            is_invalid = current_answer == INVALID_ANSWER

            if current_answer is None:
                earnings = 0.0
                winner_trade = None
            elif is_invalid:
                # Invalid markets refund the collateral.
                earnings = collateral_amount
                winner_trade = False
            elif int(trade["outcomeIndex"]) == current_answer:
                earnings = outcome_tokens_traded
                winner_trade = True

            if daily_info:
                total_mech_calls = trader_estimated_mech_calls.loc[
                    (trader_estimated_mech_calls["trading_day"] == trading_day),
                    "total_mech_calls",
                ].iloc[0]
            else:
                total_mech_calls = trader_estimated_mech_calls.loc[
                    (trader_estimated_mech_calls["market"] == trade["title"])
                    & (trader_estimated_mech_calls["trade_id"] == trade_id),
                    "total_mech_calls",
                ].iloc[0]

            # Net earnings subtract every cost: the trade fee, the mech fees
            # and the collateral that was put at risk.
            net_earnings = (
                earnings
                - fee_amount
                - (total_mech_calls * DEFAULT_MECH_FEE)
                - collateral_amount
            )

            trades_df.loc[i] = {
                "trader_address": trader_address,
                "market_creator": market_creator,
                "trade_id": trade["id"],
                "market_status": market_status.name,
                "creation_timestamp": trade["creationTimestamp"],
                "title": trade["title"],
                "collateral_amount": collateral_amount,
                "outcome_index": trade["outcomeIndex"],
                "trade_fee_amount": fee_amount,
                "outcomes_tokens_traded": outcome_tokens_traded,
                "current_answer": current_answer,
                "is_invalid": is_invalid,
                "winning_trade": winner_trade,
                "earnings": earnings,
                "redeemed": redemption,
                "redeemed_amount": earnings if redemption else 0,
                "num_mech_calls": total_mech_calls,
                "mech_fee_amount": total_mech_calls * DEFAULT_MECH_FEE,
                "net_earnings": net_earnings,
                # ROI over the total outlay: collateral + trade fee + mech fees.
                "roi": net_earnings
                / (
                    collateral_amount + fee_amount + total_mech_calls * DEFAULT_MECH_FEE
                ),
            }

        except Exception as e:
            print(f"Error processing trade {i}: {e}")
            print(trade)
            continue

    print(f"Number of trades where currentAnswer is NaN = {trades_answer_nan}")
    print(
        f"Number of trades where the market is not closed = {trades_no_closed_market}"
    )
    return trades_df
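# Worked example of the per-trade economics above (hypothetical numbers,
# assuming DEFAULT_MECH_FEE were 0.01): with earnings=1.2, trade fee=0.02,
# collateral=1.0 and 2 mech calls,
#   net_earnings = 1.2 - 0.02 - 0.02 - 1.0 = 0.16
#   roi = 0.16 / (1.0 + 0.02 + 0.02) ~= 0.154
# i.e. the ROI denominator is the trader's total outlay.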
|
|
|
|
|
def analyse_all_traders(
    trades: pd.DataFrame,
    estimated_mech_calls: pd.DataFrame,
    daily_info: bool = False,
) -> pd.DataFrame:
    """Analyse all traders."""
    all_traders = []
    traders = trades["trader_address"].unique()
    for trader in tqdm(traders, total=len(traders), desc="Analysing traders"):
        trader_estimated_mech_calls = estimated_mech_calls.loc[
            estimated_mech_calls["trader_address"] == trader
        ]
        all_traders.append(
            analyse_trader(trader, trades, trader_estimated_mech_calls, daily_info)
        )

    return pd.concat(all_traders)
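# Example: the concatenated frame keeps the ALL_TRADES_STATS_DF_COLS schema, so
# downstream steps can rely on it (frames as prepared by the pipeline below):
#   all_trades = analyse_all_traders(fpmmTrades, trade_mech_calls)
#   all_trades.groupby("trader_address")["net_earnings"].sum()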
|
|
|
|
|
@measure_execution_time
def run_profitability_analysis(
    tools_filename: str,
    trades_filename: str,
    merge: bool = False,
    tmp_dir: bool = False,
):
    """Create the all-trades profitability analysis."""
    print(f"Preparing data with {tools_filename} and {trades_filename}")
    fpmmTrades = prepare_profitability_data(
        tools_filename, trades_filename, tmp_dir=tmp_dir
    )
    if fpmmTrades is None:
        # The input files could not be read; nothing to analyse.
        return None

    if merge:
        update_tools_parquet(tools_filename)

    tools = pd.read_parquet(TMP_DIR / "tools.parquet")

    try:
        fpmmTrades["creationTimestamp"] = fpmmTrades["creationTimestamp"].apply(
            transform_to_datetime
        )
    except Exception as e:
        print(f"Transformation not needed: {e}")

    print("Computing the estimated mech calls dataset")
    trade_mech_calls = compute_mech_calls_based_on_timestamps(
        fpmmTrades=fpmmTrades, tools=tools
    )
    trade_mech_calls.to_parquet(TMP_DIR / "trade_mech_calls.parquet")

    print(trade_mech_calls.total_mech_calls.describe())
    print("Analysing trades...")
    all_trades_df = analyse_all_traders(fpmmTrades, trade_mech_calls)

    if merge:
        all_trades_df = update_all_trades_parquet(all_trades_df)

    all_trades_df.to_parquet(JSON_DATA_DIR / "all_trades_df.parquet", index=False)

    invalid_trades = all_trades_df.loc[all_trades_df["is_invalid"]]
    if len(invalid_trades) == 0:
        print("No new invalid trades")
    else:
        if merge:
            try:
                print("Merging invalid trades parquet file")
                old_invalid_trades = pd.read_parquet(
                    ROOT_DIR / "invalid_trades.parquet"
                )
                merge_df = pd.concat(
                    [old_invalid_trades, invalid_trades], ignore_index=True
                )
                invalid_trades = merge_df.drop_duplicates()
            except Exception as e:
                print(f"Error updating the invalid trades parquet {e}")
        invalid_trades.to_parquet(ROOT_DIR / "invalid_trades.parquet", index=False)

    all_trades_df = all_trades_df.loc[~all_trades_df["is_invalid"]]

    all_trades_df = label_trades_by_staking(trades_df=all_trades_df)

    print("Creating unknown traders dataset")
    unknown_traders_df, all_trades_df = create_unknown_traders_df(
        trades_df=all_trades_df
    )

    previous_unknown_traders = pd.read_parquet(ROOT_DIR / "unknown_traders.parquet")
    unknown_traders_df = pd.concat(
        [unknown_traders_df, previous_unknown_traders], ignore_index=True
    )
    unknown_traders_df.drop_duplicates("trade_id", keep="last", inplace=True)
    unknown_traders_df.to_parquet(ROOT_DIR / "unknown_traders.parquet", index=False)

    all_trades_df.to_parquet(ROOT_DIR / "all_trades_profitability.parquet", index=False)
    print("Profitability analysis done!")

    return all_trades_df
|
|
|
|
|
def add_trades_profitability(trades_filename: str) -> None:
    """Compute profitability for an extra trades file and merge the results."""
    print("Reading the trades file")
    try:
        fpmmTrades = pd.read_parquet(ROOT_DIR / trades_filename)
    except FileNotFoundError:
        print(f"Error reading the {trades_filename} file.")
        return

    assert "trader_address" in fpmmTrades.columns, "trader_address column not found"

    fpmmTrades["trader_address"] = fpmmTrades["trader_address"].str.lower().str.strip()

    print("Reading tools parquet file")
    tools = pd.read_parquet(TMP_DIR / "tools.parquet")

    try:
        fpmmTrades["creationTimestamp"] = fpmmTrades["creationTimestamp"].apply(
            transform_to_datetime
        )
    except Exception as e:
        print(f"Transformation not needed: {e}")

    print("Computing the estimated mech calls dataset")
    trade_mech_calls = compute_mech_calls_based_on_timestamps(
        fpmmTrades=fpmmTrades, tools=tools
    )
    print(trade_mech_calls.total_mech_calls.describe())
    print("Analysing trades...")
    all_trades_df = analyse_all_traders(fpmmTrades, trade_mech_calls)

    all_trades_df.to_parquet(JSON_DATA_DIR / "missing_trades_df.parquet", index=False)

    print("Checking invalid trades")
    invalid_trades = all_trades_df.loc[all_trades_df["is_invalid"]]
    if len(invalid_trades) > 0:
        try:
            print("Merging invalid trades parquet file")
            old_invalid_trades = pd.read_parquet(ROOT_DIR / "invalid_trades.parquet")
            merge_df = pd.concat(
                [old_invalid_trades, invalid_trades], ignore_index=True
            )
            invalid_trades = merge_df.drop_duplicates("trade_id")
        except Exception as e:
            print(f"Error updating the invalid trades parquet {e}")
        invalid_trades.to_parquet(ROOT_DIR / "invalid_trades.parquet", index=False)
    all_trades_df = all_trades_df.loc[~all_trades_df["is_invalid"]]

    print("Adding staking labels")
    all_trades_df = label_trades_by_staking(trades_df=all_trades_df)
    print("Creating unknown traders dataset")
    unknown_traders_df, all_trades_df = create_unknown_traders_df(
        trades_df=all_trades_df
    )
    if len(unknown_traders_df) > 0:
        print("Merging unknown traders info")
        previous_unknown_traders = pd.read_parquet(ROOT_DIR / "unknown_traders.parquet")
        unknown_traders_df = pd.concat(
            [unknown_traders_df, previous_unknown_traders], ignore_index=True
        )
        unknown_traders_df.drop_duplicates("trade_id", keep="last", inplace=True)
        unknown_traders_df.to_parquet(ROOT_DIR / "unknown_traders.parquet", index=False)

    print("Merging with the previous all_trades_profitability file")
    old_trades = pd.read_parquet(ROOT_DIR / "all_trades_profitability.parquet")
    all_trades_df = pd.concat([all_trades_df, old_trades], ignore_index=True)
    all_trades_df.drop_duplicates("trade_id", keep="last", inplace=True)
    all_trades_df.to_parquet(ROOT_DIR / "all_trades_profitability.parquet", index=False)
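# Example (hypothetical filename): backfill profitability for a batch of trades
# that a previous run missed:
#   add_trades_profitability("missing_fpmmTrades.parquet")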
|
|
|
|
|
if __name__ == "__main__": |
|
run_profitability_analysis( |
|
tools_filename="tools.parquet", |
|
trades_filename="fpmmTrades.parquet", |
|
merge=True, |
|
tmp_dir=True, |
|
) |
|
|