rosacastillo commited on
Commit
2251a51
·
1 Parent(s): 2368756

proposal of daily info graphs

Browse files
app.py CHANGED
@@ -18,8 +18,10 @@ from tabs.staking import plot_staking_trades_per_market_by_week
18
 
19
  from tabs.metrics import (
20
  trade_metric_choices,
 
21
  tool_metric_choices,
22
  default_trade_metric,
 
23
  default_tool_metric,
24
  plot_trade_metrics,
25
  get_trade_metrics_text,
@@ -51,6 +53,8 @@ from tabs.error import (
51
  get_error_data_overall_by_market,
52
  plot_tool_error_data_by_market,
53
  )
 
 
54
  from tabs.about import about_olas_predict, about_this_dashboard
55
  import matplotlib.pyplot as plt
56
  from scripts.utils import INC_TOOLS
@@ -108,7 +112,13 @@ def get_all_data():
108
  Get all data from the tools.parquet, tools_accuracy and trades parquet files
109
  """
110
  logger.info("Getting all data")
 
111
  con = duckdb.connect(":memory:")
 
 
 
 
 
112
 
113
  # Query to fetch invalid trades data
114
  query4 = f"""
@@ -141,14 +151,16 @@ def get_all_data():
141
 
142
  con.close()
143
 
144
- return df1, df2, df3, df4
145
 
146
 
147
  def prepare_data():
148
  """
149
  Prepare the data for the dashboard
150
  """
151
- tools_df, trades_df, tools_accuracy_info, invalid_trades = get_all_data()
 
 
152
  print(trades_df.info())
153
 
154
  tools_df = prepare_tools(tools_df)
@@ -162,17 +174,17 @@ def prepare_data():
162
  invalid_trades["creation_timestamp"]
163
  )
164
  invalid_trades["creation_date"] = invalid_trades["creation_timestamp"].dt.date
165
-
166
  # discovering outliers for ROI
167
  outliers = trades_df.loc[trades_df["roi"] >= 1000]
168
  if len(outliers) > 0:
169
  outliers.to_parquet("./data/outliers.parquet")
170
  trades_df = trades_df.loc[trades_df["roi"] < 1000]
171
 
172
- return tools_df, trades_df, tools_accuracy_info, invalid_trades
173
 
174
 
175
- tools_df, trades_df, tools_accuracy_info, invalid_trades = prepare_data()
176
 
177
 
178
  demo = gr.Blocks()
@@ -194,7 +206,7 @@ with demo:
194
  )
195
 
196
  with gr.Tabs():
197
- with gr.TabItem("🔥Trades Dashboard"):
198
  with gr.Row():
199
  gr.Markdown("# Trend of weekly trades")
200
  with gr.Row():
@@ -204,7 +216,9 @@ with demo:
204
 
205
  with gr.Row():
206
  with gr.Column(scale=1):
207
- gr.Markdown("# Weekly percentage of winning for Agent based trades")
 
 
208
  agent_winning_trades = (
209
  integrated_plot_winning_trades_per_market_by_week_v2(
210
  trades_df=trades_df, trader_filter="agent"
@@ -253,7 +267,7 @@ with demo:
253
 
254
  # Agentic traders graph
255
  with gr.Row():
256
- gr.Markdown("# Weekly trading metrics for trades coming from Agents")
257
  with gr.Row():
258
  trade_a_details_selector = gr.Dropdown(
259
  label="Select a trade metric",
@@ -308,7 +322,6 @@ with demo:
308
  trade_details_text = get_trade_metrics_text()
309
 
310
  def update_na_trade_details(trade_detail, trade_details_plot):
311
- print(f"user selected option= {trade_detail}")
312
  new_a_plot = plot_trade_metrics(
313
  metric_name=trade_detail,
314
  trades_df=trades_df,
@@ -321,6 +334,67 @@ with demo:
321
  inputs=[trade_na_details_selector, na_trade_details_plot],
322
  outputs=[na_trade_details_plot],
323
  )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
324
 
325
  with gr.TabItem("🔒 Staking traders"):
326
  with gr.Row():
 
18
 
19
  from tabs.metrics import (
20
  trade_metric_choices,
21
+ trade_daily_metric_choices,
22
  tool_metric_choices,
23
  default_trade_metric,
24
+ default_daily_metric,
25
  default_tool_metric,
26
  plot_trade_metrics,
27
  get_trade_metrics_text,
 
53
  get_error_data_overall_by_market,
54
  plot_tool_error_data_by_market,
55
  )
56
+
57
+ from tabs.daily_graphs import get_current_week_data, plot_daily_metrics
58
  from tabs.about import about_olas_predict, about_this_dashboard
59
  import matplotlib.pyplot as plt
60
  from scripts.utils import INC_TOOLS
 
112
  Get all data from the tools.parquet, tools_accuracy and trades parquet files
113
  """
114
  logger.info("Getting all data")
115
+ # Query to fetch daily live data
116
  con = duckdb.connect(":memory:")
117
+ query5 = f"""
118
+ SELECT *
119
+ FROM read_parquet('./data/daily_info.parquet')
120
+ """
121
+ df5 = con.execute(query5).fetchdf()
122
 
123
  # Query to fetch invalid trades data
124
  query4 = f"""
 
151
 
152
  con.close()
153
 
154
+ return df1, df2, df3, df4, df5
155
 
156
 
157
  def prepare_data():
158
  """
159
  Prepare the data for the dashboard
160
  """
161
+ tools_df, trades_df, tools_accuracy_info, invalid_trades, daily_info = (
162
+ get_all_data()
163
+ )
164
  print(trades_df.info())
165
 
166
  tools_df = prepare_tools(tools_df)
 
174
  invalid_trades["creation_timestamp"]
175
  )
176
  invalid_trades["creation_date"] = invalid_trades["creation_timestamp"].dt.date
177
+ daily_info["creation_date"] = daily_info["creation_timestamp"].dt.date
178
  # discovering outliers for ROI
179
  outliers = trades_df.loc[trades_df["roi"] >= 1000]
180
  if len(outliers) > 0:
181
  outliers.to_parquet("./data/outliers.parquet")
182
  trades_df = trades_df.loc[trades_df["roi"] < 1000]
183
 
184
+ return tools_df, trades_df, tools_accuracy_info, invalid_trades, daily_info
185
 
186
 
187
+ tools_df, trades_df, tools_accuracy_info, invalid_trades, daily_info = prepare_data()
188
 
189
 
190
  demo = gr.Blocks()
 
206
  )
207
 
208
  with gr.Tabs():
209
+ with gr.TabItem("🔥 Weekly Trades Dashboard"):
210
  with gr.Row():
211
  gr.Markdown("# Trend of weekly trades")
212
  with gr.Row():
 
216
 
217
  with gr.Row():
218
  with gr.Column(scale=1):
219
+ gr.Markdown(
220
+ "# Weekly percentage of winning for 🤖 Agent based trades"
221
+ )
222
  agent_winning_trades = (
223
  integrated_plot_winning_trades_per_market_by_week_v2(
224
  trades_df=trades_df, trader_filter="agent"
 
267
 
268
  # Agentic traders graph
269
  with gr.Row():
270
+ gr.Markdown("# Weekly trading metrics for trades coming from Agents 🤖")
271
  with gr.Row():
272
  trade_a_details_selector = gr.Dropdown(
273
  label="Select a trade metric",
 
322
  trade_details_text = get_trade_metrics_text()
323
 
324
  def update_na_trade_details(trade_detail, trade_details_plot):
 
325
  new_a_plot = plot_trade_metrics(
326
  metric_name=trade_detail,
327
  trades_df=trades_df,
 
334
  inputs=[trade_na_details_selector, na_trade_details_plot],
335
  outputs=[na_trade_details_plot],
336
  )
337
+ with gr.TabItem("📅 Daily trades dashboard (WIP)"):
338
+ current_week_trades = get_current_week_data(trades_df=trades_df)
339
+ live_trades_current_week = get_current_week_data(trades_df=daily_info)
340
+ with gr.Row():
341
+ gr.Markdown("# Daily live metrics for all trades")
342
+ with gr.Row():
343
+ trade_live_details_selector = gr.Dropdown(
344
+ label="Select a daily live metric",
345
+ choices=trade_daily_metric_choices,
346
+ value=default_daily_metric,
347
+ )
348
+
349
+ with gr.Row():
350
+ with gr.Column(scale=3):
351
+ trade_live_details_plot = plot_daily_metrics(
352
+ metric_name=default_daily_metric,
353
+ trades_df=live_trades_current_week,
354
+ )
355
+ with gr.Column(scale=1):
356
+ trade_details_text = get_trade_metrics_text()
357
+
358
+ def update_trade_live_details(trade_detail, trade_live_details_plot):
359
+ new_a_plot = plot_daily_metrics(
360
+ metric_name=trade_detail, trades_df=live_trades_current_week
361
+ )
362
+ return new_a_plot
363
+
364
+ trade_live_details_selector.change(
365
+ update_trade_live_details,
366
+ inputs=[trade_live_details_selector, trade_live_details_plot],
367
+ outputs=[trade_live_details_plot],
368
+ )
369
+
370
+ with gr.Row():
371
+ gr.Markdown("# Daily profitability metrics available for all trades")
372
+ with gr.Row():
373
+ trade_daily_details_selector = gr.Dropdown(
374
+ label="Select a daily trade metric",
375
+ choices=trade_metric_choices,
376
+ value=default_trade_metric,
377
+ )
378
+
379
+ with gr.Row():
380
+ with gr.Column(scale=3):
381
+ trade_daily_details_plot = plot_daily_metrics(
382
+ metric_name=default_trade_metric, trades_df=current_week_trades
383
+ )
384
+ with gr.Column(scale=1):
385
+ trade_details_text = get_trade_metrics_text()
386
+
387
+ def update_trade_daily_details(trade_detail, trade_daily_details_plot):
388
+ new_a_plot = plot_daily_metrics(
389
+ metric_name=trade_detail, trades_df=current_week_trades
390
+ )
391
+ return new_a_plot
392
+
393
+ trade_daily_details_selector.change(
394
+ update_trade_daily_details,
395
+ inputs=[trade_daily_details_selector, trade_daily_details_plot],
396
+ outputs=[trade_daily_details_plot],
397
+ )
398
 
399
  with gr.TabItem("🔒 Staking traders"):
400
  with gr.Row():
data/daily_info.parquet ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f41fdb6fc36cf8cb28980bca049de6b4fa986a9800176e269a5259a7e744c514
3
+ size 251792
notebooks/daily_data.ipynb ADDED
@@ -0,0 +1,430 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 2,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import pandas as pd"
10
+ ]
11
+ },
12
+ {
13
+ "cell_type": "code",
14
+ "execution_count": 2,
15
+ "metadata": {},
16
+ "outputs": [],
17
+ "source": [
18
+ "all_trades = pd.read_parquet('../data/all_trades_profitability.parquet')"
19
+ ]
20
+ },
21
+ {
22
+ "cell_type": "code",
23
+ "execution_count": 3,
24
+ "metadata": {},
25
+ "outputs": [
26
+ {
27
+ "data": {
28
+ "text/plain": [
29
+ "Timestamp('2024-11-23 01:38:25+0000', tz='UTC')"
30
+ ]
31
+ },
32
+ "execution_count": 3,
33
+ "metadata": {},
34
+ "output_type": "execute_result"
35
+ }
36
+ ],
37
+ "source": [
38
+ "max(all_trades.creation_timestamp)"
39
+ ]
40
+ },
41
+ {
42
+ "cell_type": "code",
43
+ "execution_count": 4,
44
+ "metadata": {},
45
+ "outputs": [
46
+ {
47
+ "data": {
48
+ "text/plain": [
49
+ "Timestamp('2024-09-22 00:02:05+0000', tz='UTC')"
50
+ ]
51
+ },
52
+ "execution_count": 4,
53
+ "metadata": {},
54
+ "output_type": "execute_result"
55
+ }
56
+ ],
57
+ "source": [
58
+ "min(all_trades.creation_timestamp)"
59
+ ]
60
+ },
61
+ {
62
+ "cell_type": "code",
63
+ "execution_count": 3,
64
+ "metadata": {},
65
+ "outputs": [],
66
+ "source": [
67
+ "new_trades = pd.read_parquet('../data/new_fpmmTrades.parquet')"
68
+ ]
69
+ },
70
+ {
71
+ "cell_type": "code",
72
+ "execution_count": 11,
73
+ "metadata": {},
74
+ "outputs": [
75
+ {
76
+ "name": "stdout",
77
+ "output_type": "stream",
78
+ "text": [
79
+ "<class 'pandas.core.frame.DataFrame'>\n",
80
+ "RangeIndex: 3798 entries, 0 to 3797\n",
81
+ "Data columns (total 24 columns):\n",
82
+ " # Column Non-Null Count Dtype \n",
83
+ "--- ------ -------------- ----- \n",
84
+ " 0 collateralAmount 3798 non-null object\n",
85
+ " 1 collateralAmountUSD 3798 non-null object\n",
86
+ " 2 collateralToken 3798 non-null object\n",
87
+ " 3 creationTimestamp 3798 non-null object\n",
88
+ " 4 trader_address 3798 non-null object\n",
89
+ " 5 feeAmount 3798 non-null object\n",
90
+ " 6 id 3798 non-null object\n",
91
+ " 7 oldOutcomeTokenMarginalPrice 3798 non-null object\n",
92
+ " 8 outcomeIndex 3798 non-null object\n",
93
+ " 9 outcomeTokenMarginalPrice 3798 non-null object\n",
94
+ " 10 outcomeTokensTraded 3798 non-null object\n",
95
+ " 11 title 3798 non-null object\n",
96
+ " 12 transactionHash 3798 non-null object\n",
97
+ " 13 type 3798 non-null object\n",
98
+ " 14 market_creator 3798 non-null object\n",
99
+ " 15 fpmm.answerFinalizedTimestamp 0 non-null object\n",
100
+ " 16 fpmm.arbitrationOccurred 3798 non-null bool \n",
101
+ " 17 fpmm.currentAnswer 0 non-null object\n",
102
+ " 18 fpmm.id 3798 non-null object\n",
103
+ " 19 fpmm.isPendingArbitration 3798 non-null bool \n",
104
+ " 20 fpmm.openingTimestamp 3798 non-null object\n",
105
+ " 21 fpmm.outcomes 3798 non-null object\n",
106
+ " 22 fpmm.title 3798 non-null object\n",
107
+ " 23 fpmm.condition.id 3798 non-null object\n",
108
+ "dtypes: bool(2), object(22)\n",
109
+ "memory usage: 660.3+ KB\n"
110
+ ]
111
+ }
112
+ ],
113
+ "source": [
114
+ "new_trades.info()"
115
+ ]
116
+ },
117
+ {
118
+ "cell_type": "code",
119
+ "execution_count": 12,
120
+ "metadata": {},
121
+ "outputs": [
122
+ {
123
+ "data": {
124
+ "text/plain": [
125
+ "3798"
126
+ ]
127
+ },
128
+ "execution_count": 12,
129
+ "metadata": {},
130
+ "output_type": "execute_result"
131
+ }
132
+ ],
133
+ "source": [
134
+ "len(new_trades.id.unique())"
135
+ ]
136
+ },
137
+ {
138
+ "cell_type": "code",
139
+ "execution_count": 4,
140
+ "metadata": {},
141
+ "outputs": [
142
+ {
143
+ "data": {
144
+ "text/plain": [
145
+ "Index(['collateralAmount', 'collateralAmountUSD', 'collateralToken',\n",
146
+ " 'creationTimestamp', 'trader_address', 'feeAmount', 'id',\n",
147
+ " 'oldOutcomeTokenMarginalPrice', 'outcomeIndex',\n",
148
+ " 'outcomeTokenMarginalPrice', 'outcomeTokensTraded', 'title',\n",
149
+ " 'transactionHash', 'type', 'market_creator',\n",
150
+ " 'fpmm.answerFinalizedTimestamp', 'fpmm.arbitrationOccurred',\n",
151
+ " 'fpmm.currentAnswer', 'fpmm.id', 'fpmm.isPendingArbitration',\n",
152
+ " 'fpmm.openingTimestamp', 'fpmm.outcomes', 'fpmm.title',\n",
153
+ " 'fpmm.condition.id'],\n",
154
+ " dtype='object')"
155
+ ]
156
+ },
157
+ "execution_count": 4,
158
+ "metadata": {},
159
+ "output_type": "execute_result"
160
+ }
161
+ ],
162
+ "source": [
163
+ "new_trades.columns"
164
+ ]
165
+ },
166
+ {
167
+ "cell_type": "code",
168
+ "execution_count": 6,
169
+ "metadata": {},
170
+ "outputs": [
171
+ {
172
+ "data": {
173
+ "text/plain": [
174
+ "'1732609530'"
175
+ ]
176
+ },
177
+ "execution_count": 6,
178
+ "metadata": {},
179
+ "output_type": "execute_result"
180
+ }
181
+ ],
182
+ "source": [
183
+ "max(new_trades.creationTimestamp)"
184
+ ]
185
+ },
186
+ {
187
+ "cell_type": "code",
188
+ "execution_count": 13,
189
+ "metadata": {},
190
+ "outputs": [],
191
+ "source": [
192
+ "old_trades = pd.read_parquet('../data/fpmmTrades.parquet')"
193
+ ]
194
+ },
195
+ {
196
+ "cell_type": "code",
197
+ "execution_count": 14,
198
+ "metadata": {},
199
+ "outputs": [
200
+ {
201
+ "data": {
202
+ "text/plain": [
203
+ "'1732609530'"
204
+ ]
205
+ },
206
+ "execution_count": 14,
207
+ "metadata": {},
208
+ "output_type": "execute_result"
209
+ }
210
+ ],
211
+ "source": [
212
+ "max(old_trades.creationTimestamp)"
213
+ ]
214
+ },
215
+ {
216
+ "cell_type": "code",
217
+ "execution_count": 25,
218
+ "metadata": {},
219
+ "outputs": [],
220
+ "source": [
221
+ "all_trades_before = pd.read_parquet('../data/daily_info.parquet')"
222
+ ]
223
+ },
224
+ {
225
+ "cell_type": "code",
226
+ "execution_count": 26,
227
+ "metadata": {},
228
+ "outputs": [
229
+ {
230
+ "name": "stdout",
231
+ "output_type": "stream",
232
+ "text": [
233
+ "<class 'pandas.core.frame.DataFrame'>\n",
234
+ "RangeIndex: 3882 entries, 0 to 3881\n",
235
+ "Data columns (total 21 columns):\n",
236
+ " # Column Non-Null Count Dtype \n",
237
+ "--- ------ -------------- ----- \n",
238
+ " 0 trader_address 3882 non-null object \n",
239
+ " 1 market_creator 3882 non-null object \n",
240
+ " 2 trade_id 3882 non-null object \n",
241
+ " 3 creation_timestamp 3882 non-null datetime64[ns, UTC]\n",
242
+ " 4 title 3882 non-null object \n",
243
+ " 5 market_status 3882 non-null object \n",
244
+ " 6 collateral_amount 3882 non-null float64 \n",
245
+ " 7 outcome_index 3882 non-null object \n",
246
+ " 8 trade_fee_amount 3882 non-null float64 \n",
247
+ " 9 outcomes_tokens_traded 3882 non-null float64 \n",
248
+ " 10 current_answer 0 non-null object \n",
249
+ " 11 is_invalid 3882 non-null bool \n",
250
+ " 12 winning_trade 0 non-null object \n",
251
+ " 13 earnings 3882 non-null float64 \n",
252
+ " 14 redeemed 3882 non-null bool \n",
253
+ " 15 redeemed_amount 3882 non-null int64 \n",
254
+ " 16 num_mech_calls 3882 non-null int64 \n",
255
+ " 17 mech_fee_amount 3882 non-null float64 \n",
256
+ " 18 net_earnings 3882 non-null float64 \n",
257
+ " 19 roi 3882 non-null float64 \n",
258
+ " 20 staking 3882 non-null object \n",
259
+ "dtypes: bool(2), datetime64[ns, UTC](1), float64(7), int64(2), object(9)\n",
260
+ "memory usage: 583.9+ KB\n"
261
+ ]
262
+ }
263
+ ],
264
+ "source": [
265
+ "all_trades_before.info()"
266
+ ]
267
+ },
268
+ {
269
+ "cell_type": "code",
270
+ "execution_count": 27,
271
+ "metadata": {},
272
+ "outputs": [
273
+ {
274
+ "data": {
275
+ "text/plain": [
276
+ "Index(['trader_address', 'market_creator', 'trade_id', 'creation_timestamp',\n",
277
+ " 'title', 'market_status', 'collateral_amount', 'outcome_index',\n",
278
+ " 'trade_fee_amount', 'outcomes_tokens_traded', 'current_answer',\n",
279
+ " 'is_invalid', 'winning_trade', 'earnings', 'redeemed',\n",
280
+ " 'redeemed_amount', 'num_mech_calls', 'mech_fee_amount', 'net_earnings',\n",
281
+ " 'roi', 'staking'],\n",
282
+ " dtype='object')"
283
+ ]
284
+ },
285
+ "execution_count": 27,
286
+ "metadata": {},
287
+ "output_type": "execute_result"
288
+ }
289
+ ],
290
+ "source": [
291
+ "all_trades_before.columns"
292
+ ]
293
+ },
294
+ {
295
+ "cell_type": "code",
296
+ "execution_count": 23,
297
+ "metadata": {},
298
+ "outputs": [
299
+ {
300
+ "data": {
301
+ "text/plain": [
302
+ "Timestamp('2024-11-26 10:19:30+0000', tz='UTC')"
303
+ ]
304
+ },
305
+ "execution_count": 23,
306
+ "metadata": {},
307
+ "output_type": "execute_result"
308
+ }
309
+ ],
310
+ "source": [
311
+ "max(all_trades_before.creation_timestamp)"
312
+ ]
313
+ },
314
+ {
315
+ "cell_type": "code",
316
+ "execution_count": 28,
317
+ "metadata": {},
318
+ "outputs": [
319
+ {
320
+ "data": {
321
+ "text/plain": [
322
+ "staking\n",
323
+ "non_agent 2376\n",
324
+ "quickstart 672\n",
325
+ "pearl 502\n",
326
+ "non_staking 332\n",
327
+ "Name: count, dtype: int64"
328
+ ]
329
+ },
330
+ "execution_count": 28,
331
+ "metadata": {},
332
+ "output_type": "execute_result"
333
+ }
334
+ ],
335
+ "source": [
336
+ "all_trades_before.staking.value_counts()"
337
+ ]
338
+ },
339
+ {
340
+ "cell_type": "code",
341
+ "execution_count": 7,
342
+ "metadata": {},
343
+ "outputs": [],
344
+ "source": [
345
+ "all_trades_df = pd.read_parquet('../json_data/all_trades_df.parquet')"
346
+ ]
347
+ },
348
+ {
349
+ "cell_type": "code",
350
+ "execution_count": 8,
351
+ "metadata": {},
352
+ "outputs": [
353
+ {
354
+ "data": {
355
+ "text/plain": [
356
+ "Index(['trader_address', 'market_creator', 'trade_id', 'creation_timestamp',\n",
357
+ " 'title', 'market_status', 'collateral_amount', 'outcome_index',\n",
358
+ " 'trade_fee_amount', 'outcomes_tokens_traded', 'current_answer',\n",
359
+ " 'is_invalid', 'winning_trade', 'earnings', 'redeemed',\n",
360
+ " 'redeemed_amount', 'num_mech_calls', 'mech_fee_amount', 'net_earnings',\n",
361
+ " 'roi', 'staking', 'nr_mech_calls'],\n",
362
+ " dtype='object')"
363
+ ]
364
+ },
365
+ "execution_count": 8,
366
+ "metadata": {},
367
+ "output_type": "execute_result"
368
+ }
369
+ ],
370
+ "source": [
371
+ "all_trades_df.columns"
372
+ ]
373
+ },
374
+ {
375
+ "cell_type": "code",
376
+ "execution_count": 9,
377
+ "metadata": {},
378
+ "outputs": [
379
+ {
380
+ "data": {
381
+ "text/plain": [
382
+ "Timestamp('2024-11-23 01:38:25+0000', tz='UTC')"
383
+ ]
384
+ },
385
+ "execution_count": 9,
386
+ "metadata": {},
387
+ "output_type": "execute_result"
388
+ }
389
+ ],
390
+ "source": [
391
+ "max(all_trades_df.creation_timestamp)"
392
+ ]
393
+ },
394
+ {
395
+ "cell_type": "code",
396
+ "execution_count": null,
397
+ "metadata": {},
398
+ "outputs": [],
399
+ "source": []
400
+ },
401
+ {
402
+ "cell_type": "code",
403
+ "execution_count": null,
404
+ "metadata": {},
405
+ "outputs": [],
406
+ "source": []
407
+ }
408
+ ],
409
+ "metadata": {
410
+ "kernelspec": {
411
+ "display_name": "hf_dashboards",
412
+ "language": "python",
413
+ "name": "python3"
414
+ },
415
+ "language_info": {
416
+ "codemirror_mode": {
417
+ "name": "ipython",
418
+ "version": 3
419
+ },
420
+ "file_extension": ".py",
421
+ "mimetype": "text/x-python",
422
+ "name": "python",
423
+ "nbconvert_exporter": "python",
424
+ "pygments_lexer": "ipython3",
425
+ "version": "3.12.2"
426
+ }
427
+ },
428
+ "nbformat": 4,
429
+ "nbformat_minor": 2
430
+ }
scripts/daily_data.py ADDED
@@ -0,0 +1,89 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from utils import get_question, current_answer, RPC, measure_execution_time
3
+ from markets import (
4
+ etl as mkt_etl,
5
+ DEFAULT_FILENAME as MARKETS_FILENAME,
6
+ )
7
+ from get_mech_info import (
8
+ get_mech_events_since_last_run,
9
+ update_json_files,
10
+ )
11
+ from pull_data import DATA_DIR, update_json_files, updating_timestamps
12
+ from tools import DEFAULT_FILENAME as TOOLS_FILENAME, generate_tools_file
13
+ from profitability import (
14
+ run_profitability_analysis,
15
+ analyse_all_traders,
16
+ label_trades_by_staking,
17
+ )
18
+ from update_tools_accuracy import compute_tools_accuracy
19
+ import pandas as pd
20
+
21
+ logging.basicConfig(level=logging.INFO)
22
+
23
+
24
+ def prepare_live_metrics(
25
+ tools_filename="new_tools.parquet", trades_filename="new_fpmmTrades.parquet"
26
+ ):
27
+ fpmmTrades = pd.read_parquet(DATA_DIR / trades_filename)
28
+ tools = pd.read_parquet(DATA_DIR / tools_filename)
29
+ print("Analysing trades...")
30
+ all_trades_df = analyse_all_traders(fpmmTrades, tools, daily_info=True)
31
+
32
+ # staking label
33
+ label_trades_by_staking(all_trades_df)
34
+
35
+ # save into a separate file
36
+ all_trades_df.to_parquet(DATA_DIR / "daily_info.parquet", index=False)
37
+
38
+
39
+ @measure_execution_time
40
+ def daily_analysis():
41
+ """Run daily analysis for the FPMMS project."""
42
+ rpc = RPC
43
+ # Run markets ETL
44
+ logging.info("Running markets ETL")
45
+ mkt_etl(MARKETS_FILENAME)
46
+ logging.info("Markets ETL completed")
47
+
48
+ # get only new data
49
+ latest_timestamp = get_mech_events_since_last_run()
50
+ if latest_timestamp == None:
51
+ print("Error while getting the mech events")
52
+ return
53
+ logging.info(f"Finished generating the mech json files from {latest_timestamp}")
54
+
55
+ # Run tools ETL
56
+ logging.info("Generate and parse the tools content")
57
+ # generate only new file
58
+ generate_tools_file("new_tools_info.json", "new_tools.parquet")
59
+ logging.info("Tools ETL completed")
60
+
61
+ # add_current_answer("new_tools.parquet")
62
+
63
+ # # Run profitability analysis
64
+ logging.info("Computing trading metrics")
65
+ run_profitability_analysis(
66
+ rpc=rpc,
67
+ tools_filename="new_tools.parquet",
68
+ trades_filename="new_fpmmTrades.parquet",
69
+ # from_timestamp=int(latest_timestamp.timestamp()),
70
+ from_timestamp=latest_timestamp,
71
+ merge=True,
72
+ )
73
+ logging.info("Profitability analysis completed")
74
+
75
+ # merge new json files with old json files
76
+ update_json_files()
77
+
78
+ try:
79
+ updating_timestamps(rpc, TOOLS_FILENAME)
80
+ except Exception as e:
81
+ logging.error("Error while updating timestamps of tools")
82
+ print(e)
83
+
84
+ compute_tools_accuracy()
85
+
86
+
87
+ if __name__ == "__main__":
88
+ daily_analysis()
89
+ prepare_live_metrics()
scripts/get_mech_info.py CHANGED
@@ -143,14 +143,12 @@ def update_fpmmTrades_parquet(trades_filename: str) -> pd.DataFrame:
143
  ].astype(bool)
144
 
145
  # Check for duplicates
146
- print(f"Initial length before removing duplicates= {len(merge_df)}")
147
 
148
  # Remove duplicates
149
  # fpmm.outcomes is a numpy array
150
- merge_df = merge_df.drop_duplicates(
151
- subset=[col for col in merge_df.columns if col != "fpmm.outcomes"]
152
- )
153
- print(f"Final length before removing duplicates= {len(merge_df)}")
154
 
155
  # save the parquet file
156
  merge_df.to_parquet(DATA_DIR / "fpmmTrades.parquet", index=False)
@@ -169,11 +167,11 @@ def update_all_trades_parquet(new_trades_df: pd.DataFrame) -> pd.DataFrame:
169
  merge_df = pd.concat([old_trades_df, new_trades_df], ignore_index=True)
170
 
171
  # Check for duplicates
172
- print(f"Initial length before removing duplicates= {len(merge_df)}")
173
 
174
  # Remove duplicates
175
- merge_df.drop_duplicates(inplace=True)
176
- print(f"Final length before removing duplicates= {len(merge_df)}")
177
  return merge_df
178
 
179
 
@@ -193,11 +191,11 @@ def update_tools_parquet(new_tools_filename: pd.DataFrame):
193
  merge_df = pd.concat([old_tools_df, new_tools_df], ignore_index=True)
194
 
195
  # Check for duplicates
196
- print(f"Initial length before removing duplicates= {len(merge_df)}")
197
 
198
  # Remove duplicates
199
  merge_df.drop_duplicates(inplace=True)
200
- print(f"Final length before removing duplicates= {len(merge_df)}")
201
 
202
  # save the parquet file
203
  merge_df.to_parquet(DATA_DIR / "tools.parquet", index=False)
 
143
  ].astype(bool)
144
 
145
  # Check for duplicates
146
+ print(f"Initial length before removing duplicates in fpmmTrades= {len(merge_df)}")
147
 
148
  # Remove duplicates
149
  # fpmm.outcomes is a numpy array
150
+ merge_df.drop_duplicates("id", inplace=True)
151
+ print(f"Final length after removing duplicates in fpmmTrades= {len(merge_df)}")
 
 
152
 
153
  # save the parquet file
154
  merge_df.to_parquet(DATA_DIR / "fpmmTrades.parquet", index=False)
 
167
  merge_df = pd.concat([old_trades_df, new_trades_df], ignore_index=True)
168
 
169
  # Check for duplicates
170
+ print(f"Initial length before removing duplicates in all_trades= {len(merge_df)}")
171
 
172
  # Remove duplicates
173
+ merge_df.drop_duplicates("trade_id", inplace=True)
174
+ print(f"Final length after removing duplicates in all_trades = {len(merge_df)}")
175
  return merge_df
176
 
177
 
 
191
  merge_df = pd.concat([old_tools_df, new_tools_df], ignore_index=True)
192
 
193
  # Check for duplicates
194
+ print(f"Initial length before removing duplicates in tools= {len(merge_df)}")
195
 
196
  # Remove duplicates
197
  merge_df.drop_duplicates(inplace=True)
198
+ print(f"Final length after removing duplicates in tools= {len(merge_df)}")
199
 
200
  # save the parquet file
201
  merge_df.to_parquet(DATA_DIR / "tools.parquet", index=False)
scripts/profitability.py CHANGED
@@ -257,7 +257,6 @@ def _is_redeemed(user_json: dict[str, Any], fpmmTrade: dict[str, Any]) -> bool:
257
  """Returns whether the user has redeemed the position."""
258
  user_positions = user_json["data"]["user"]["userPositions"]
259
  condition_id = fpmmTrade["fpmm.condition.id"]
260
-
261
  for position in user_positions:
262
  position_condition_ids = position["position"]["conditionIds"]
263
  balance = int(position["balance"])
@@ -372,9 +371,11 @@ def prepare_profitalibity_data(
372
 
373
  def determine_market_status(trade, current_answer):
374
  """Determine the market status of a trade."""
375
- if current_answer is np.nan and time.time() >= int(trade["fpmm.openingTimestamp"]):
 
 
376
  return MarketState.PENDING
377
- elif current_answer == np.nan:
378
  return MarketState.OPEN
379
  elif trade["fpmm.isPendingArbitration"]:
380
  return MarketState.ARBITRATING
@@ -384,7 +385,10 @@ def determine_market_status(trade, current_answer):
384
 
385
 
386
  def analyse_trader(
387
- trader_address: str, fpmmTrades: pd.DataFrame, tools: pd.DataFrame
 
 
 
388
  ) -> pd.DataFrame:
389
  """Analyse a trader's trades"""
390
  # Filter trades and tools for the given trader
@@ -406,10 +410,12 @@ def analyse_trader(
406
  # Iterate over the trades
407
  for i, trade in tqdm(trades.iterrows(), total=len(trades), desc="Analysing trades"):
408
  try:
409
- if not trade["fpmm.currentAnswer"]:
 
410
  print(f"Skipping trade {i} because currentAnswer is NaN")
411
  continue
412
  # Parsing and computing shared values
 
413
  creation_timestamp_utc = datetime.datetime.fromtimestamp(
414
  int(trade["creationTimestamp"]), tz=datetime.timezone.utc
415
  )
@@ -418,25 +424,29 @@ def analyse_trader(
418
  outcome_tokens_traded = wei_to_unit(float(trade["outcomeTokensTraded"]))
419
  earnings, winner_trade = (0, False)
420
  redemption = _is_redeemed(user_json, trade)
421
- current_answer = trade["fpmm.currentAnswer"]
422
  market_creator = trade["market_creator"]
423
 
424
  # Determine market status
425
  market_status = determine_market_status(trade, current_answer)
426
 
427
  # Skip non-closed markets
428
- if market_status != MarketState.CLOSED:
429
  print(
430
  f"Skipping trade {i} because market is not closed. Market Status: {market_status}"
431
  )
432
  continue
433
- current_answer = convert_hex_to_int(current_answer)
 
434
 
435
  # Compute invalidity
436
  is_invalid = current_answer == INVALID_ANSWER
437
 
438
  # Compute earnings and winner trade status
439
- if is_invalid:
 
 
 
440
  earnings = collateral_amount
441
  winner_trade = False
442
  elif int(trade["outcomeIndex"]) == current_answer:
@@ -491,12 +501,15 @@ def analyse_trader(
491
 
492
  except Exception as e:
493
  print(f"Error processing trade {i}: {e}")
 
494
  continue
495
 
496
  return trades_df
497
 
498
 
499
- def analyse_all_traders(trades: pd.DataFrame, tools: pd.DataFrame) -> pd.DataFrame:
 
 
500
  """Analyse all creators."""
501
  all_traders = []
502
  for trader in tqdm(
@@ -504,7 +517,7 @@ def analyse_all_traders(trades: pd.DataFrame, tools: pd.DataFrame) -> pd.DataFra
504
  total=len(trades["trader_address"].unique()),
505
  desc="Analysing creators",
506
  ):
507
- all_traders.append(analyse_trader(trader, trades, tools))
508
 
509
  # concat all creators
510
  all_creators_df = pd.concat(all_traders)
@@ -573,7 +586,6 @@ def run_profitability_analysis(
573
  rpc, tools_filename, trades_filename, from_timestamp
574
  )
575
 
576
- # all trades profitability df
577
  print("Analysing trades...")
578
  all_trades_df = analyse_all_traders(fpmmTrades, tools)
579
 
@@ -584,7 +596,7 @@ def run_profitability_analysis(
584
  all_trades_df = update_all_trades_parquet(all_trades_df)
585
 
586
  # debugging purposes
587
- all_trades_df.to_parquet(JSON_DATA_DIR / "all_trades_df.parquets")
588
  # filter invalid markets. Condition: "is_invalid" is True
589
  invalid_trades = all_trades_df.loc[all_trades_df["is_invalid"] == True]
590
  if merge:
 
257
  """Returns whether the user has redeemed the position."""
258
  user_positions = user_json["data"]["user"]["userPositions"]
259
  condition_id = fpmmTrade["fpmm.condition.id"]
 
260
  for position in user_positions:
261
  position_condition_ids = position["position"]["conditionIds"]
262
  balance = int(position["balance"])
 
371
 
372
  def determine_market_status(trade, current_answer):
373
  """Determine the market status of a trade."""
374
+ if (current_answer is np.nan or current_answer is None) and time.time() >= int(
375
+ trade["fpmm.openingTimestamp"]
376
+ ):
377
  return MarketState.PENDING
378
+ elif current_answer is np.nan or current_answer is None:
379
  return MarketState.OPEN
380
  elif trade["fpmm.isPendingArbitration"]:
381
  return MarketState.ARBITRATING
 
385
 
386
 
387
  def analyse_trader(
388
+ trader_address: str,
389
+ fpmmTrades: pd.DataFrame,
390
+ tools: pd.DataFrame,
391
+ daily_info: bool = False,
392
  ) -> pd.DataFrame:
393
  """Analyse a trader's trades"""
394
  # Filter trades and tools for the given trader
 
410
  # Iterate over the trades
411
  for i, trade in tqdm(trades.iterrows(), total=len(trades), desc="Analysing trades"):
412
  try:
413
+ market_answer = trade["fpmm.currentAnswer"]
414
+ if not daily_info and not market_answer:
415
  print(f"Skipping trade {i} because currentAnswer is NaN")
416
  continue
417
  # Parsing and computing shared values
418
+
419
  creation_timestamp_utc = datetime.datetime.fromtimestamp(
420
  int(trade["creationTimestamp"]), tz=datetime.timezone.utc
421
  )
 
424
  outcome_tokens_traded = wei_to_unit(float(trade["outcomeTokensTraded"]))
425
  earnings, winner_trade = (0, False)
426
  redemption = _is_redeemed(user_json, trade)
427
+ current_answer = market_answer if market_answer else None
428
  market_creator = trade["market_creator"]
429
 
430
  # Determine market status
431
  market_status = determine_market_status(trade, current_answer)
432
 
433
  # Skip non-closed markets
434
+ if not daily_info and market_status != MarketState.CLOSED:
435
  print(
436
  f"Skipping trade {i} because market is not closed. Market Status: {market_status}"
437
  )
438
  continue
439
+ if current_answer is not None:
440
+ current_answer = convert_hex_to_int(current_answer)
441
 
442
  # Compute invalidity
443
  is_invalid = current_answer == INVALID_ANSWER
444
 
445
  # Compute earnings and winner trade status
446
+ if current_answer is None:
447
+ earnings = 0.0
448
+ winner_trade = None
449
+ elif is_invalid:
450
  earnings = collateral_amount
451
  winner_trade = False
452
  elif int(trade["outcomeIndex"]) == current_answer:
 
501
 
502
  except Exception as e:
503
  print(f"Error processing trade {i}: {e}")
504
+ print(trade)
505
  continue
506
 
507
  return trades_df
508
 
509
 
510
+ def analyse_all_traders(
511
+ trades: pd.DataFrame, tools: pd.DataFrame, daily_info: bool = False
512
+ ) -> pd.DataFrame:
513
  """Analyse all creators."""
514
  all_traders = []
515
  for trader in tqdm(
 
517
  total=len(trades["trader_address"].unique()),
518
  desc="Analysing creators",
519
  ):
520
+ all_traders.append(analyse_trader(trader, trades, tools, daily_info))
521
 
522
  # concat all creators
523
  all_creators_df = pd.concat(all_traders)
 
586
  rpc, tools_filename, trades_filename, from_timestamp
587
  )
588
 
 
589
  print("Analysing trades...")
590
  all_trades_df = analyse_all_traders(fpmmTrades, tools)
591
 
 
596
  all_trades_df = update_all_trades_parquet(all_trades_df)
597
 
598
  # debugging purposes
599
+ all_trades_df.to_parquet(JSON_DATA_DIR / "all_trades_df.parquet")
600
  # filter invalid markets. Condition: "is_invalid" is True
601
  invalid_trades = all_trades_df.loc[all_trades_df["is_invalid"] == True]
602
  if merge:
scripts/pull_data.py CHANGED
@@ -1,5 +1,4 @@
1
  import logging
2
- import os
3
  import pickle
4
  from datetime import datetime
5
  from concurrent.futures import ThreadPoolExecutor
@@ -14,7 +13,7 @@ from markets import (
14
  )
15
  from tools import DEFAULT_FILENAME as TOOLS_FILENAME, generate_tools_file
16
  from profitability import run_profitability_analysis, DEFAULT_60_DAYS_AGO_TIMESTAMP
17
- from utils import get_question, current_answer, RPC
18
  from get_mech_info import (
19
  get_mech_events_last_60_days,
20
  get_mech_events_since_last_run,
@@ -120,12 +119,13 @@ def updating_timestamps(rpc: str, tools_filename: str):
120
  gc.collect()
121
 
122
 
 
123
  def only_new_weekly_analysis():
124
  """Run weekly analysis for the FPMMS project."""
125
  rpc = RPC
126
  # Run markets ETL
127
  logging.info("Running markets ETL")
128
- # mkt_etl(MARKETS_FILENAME)
129
  logging.info("Markets ETL completed")
130
 
131
  # New tools ETL
 
1
  import logging
 
2
  import pickle
3
  from datetime import datetime
4
  from concurrent.futures import ThreadPoolExecutor
 
13
  )
14
  from tools import DEFAULT_FILENAME as TOOLS_FILENAME, generate_tools_file
15
  from profitability import run_profitability_analysis, DEFAULT_60_DAYS_AGO_TIMESTAMP
16
+ from utils import get_question, current_answer, RPC, measure_execution_time
17
  from get_mech_info import (
18
  get_mech_events_last_60_days,
19
  get_mech_events_since_last_run,
 
119
  gc.collect()
120
 
121
 
122
+ @measure_execution_time
123
  def only_new_weekly_analysis():
124
  """Run weekly analysis for the FPMMS project."""
125
  rpc = RPC
126
  # Run markets ETL
127
  logging.info("Running markets ETL")
128
+ mkt_etl(MARKETS_FILENAME)
129
  logging.info("Markets ETL completed")
130
 
131
  # New tools ETL
tabs/daily_graphs.py ADDED
@@ -0,0 +1,215 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pandas as pd
2
+ import gradio as gr
3
+ import gc
4
+ import plotly.express as px
5
+ from plotly.subplots import make_subplots
6
+ import plotly.graph_objects as go
7
+ from datetime import datetime, timedelta
8
+
9
+
10
def plot_daily_trades(trades_df: pd.DataFrame) -> gr.Plot:
    """Plot the number of trades per day, colored by market creator.

    :param trades_df: trades dataframe with a ``creation_date`` column
        (added by ``prepare_trades``) and a ``market_creator`` column.
    :return: a gradio Plot wrapping a grouped bar chart of daily trade counts.
    """
    # Count trades per calendar day and market. The previous WIP version
    # grouped by the weekly bucket "month_year_week", which is wrong for a
    # daily graph, and never returned a figure.
    daily_trades_count = (
        trades_df.groupby(["creation_date", "market_creator"])
        .size()
        .reset_index(name="trades")
    )
    daily_trades_count.columns = daily_trades_count.columns.astype(str)
    fig = px.bar(
        daily_trades_count,
        x="creation_date",
        y="trades",
        color="market_creator",
        barmode="group",
        category_orders={"market_creator": ["pearl", "quickstart", "all"]},
    )
    fig.update_layout(xaxis_title="Day", yaxis_title="Nr of trades")
    fig.update_xaxes(tickformat="%b %d")
    return gr.Plot(value=fig)
17
+
18
+
19
def get_current_week_data(trades_df: pd.DataFrame) -> pd.DataFrame:
    """Return only the trades created during the current calendar week.

    The week runs Monday 00:00:00 through Sunday 23:59:59.999999, based on
    the local clock. NOTE: mutates ``trades_df`` by casting its
    ``creation_date`` column to datetime.
    """
    today = datetime.now()

    # Monday of the current week, at midnight.
    start_of_week = (today - timedelta(days=today.weekday())).replace(
        hour=0, minute=0, second=0, microsecond=0
    )
    print(f"start of the week = {start_of_week}")

    # Sunday of the current week, at the last representable microsecond.
    end_of_week = (start_of_week + timedelta(days=6)).replace(
        hour=23, minute=59, second=59, microsecond=999999
    )
    print(f"end of the week = {end_of_week}")

    trades_df["creation_date"] = pd.to_datetime(trades_df["creation_date"])
    in_current_week = (trades_df["creation_date"] >= start_of_week) & (
        trades_df["creation_date"] <= end_of_week
    )
    return trades_df[in_current_week]
38
+
39
+
40
def get_boxplot_daily_metrics(
    column_name: str, trades_df: pd.DataFrame
) -> pd.DataFrame:
    """Build the per-day dataset used by the daily box plots.

    Duplicates the trades with ``market_creator`` set to "all" so the plots
    can show pearl, quickstart and the combined population side by side.

    :param column_name: the metric column to keep (e.g. "roi").
    :param trades_df: trades dataframe; must contain the selected columns.
    :return: the filtered trades plus their "all" duplicates, sorted by
        creation timestamp.
    """
    # Keep "staking" as well: plot_daily_metrics derives the agent/non-agent
    # split from it. The previous version dropped it from the base rows while
    # duplicating the FULL dataframe for the "all" rows, so after the concat
    # the base rows had staking = NaN and were all mislabeled as "agent".
    trades_filtered = trades_df[
        [
            "creation_timestamp",
            "creation_date",
            "market_creator",
            "staking",
            column_name,
        ]
    ]
    # Duplicate the same column selection under the synthetic "all" creator.
    trades_filtered_all = trades_filtered.copy(deep=True)
    trades_filtered_all["market_creator"] = "all"

    # merging both dataframes
    all_filtered_trades = pd.concat(
        [trades_filtered, trades_filtered_all], ignore_index=True
    )
    all_filtered_trades = all_filtered_trades.sort_values(
        by="creation_timestamp", ascending=True
    )
    gc.collect()
    return all_filtered_trades
59
+
60
+
61
def plot_daily_metrics(
    metric_name: str, trades_df: pd.DataFrame, trader_filter: str = None
) -> gr.Plot:
    """Plot daily box plots of a trade metric, split by trader type and market.

    :param metric_name: one of "mech calls", "ROI", "collateral amount",
        "net earnings" or "earnings".
    :param trades_df: trades dataframe (needs "staking", "market_creator",
        "creation_date", "creation_timestamp" and the metric column).
    :param trader_filter: "agent", "non_agent" or None for all traders.
    :return: a gradio Plot with one facet per market creator.
    """
    # Map the human-readable metric name to the dataframe column and axis label.
    if metric_name == "mech calls":
        column_name = "num_mech_calls"
        yaxis_title = "Nr of mech calls per trade"
    elif metric_name == "ROI":
        column_name = "roi"
        yaxis_title = "ROI (net profit/cost)"
    elif metric_name == "collateral amount":
        column_name = "collateral_amount"
        yaxis_title = "Collateral amount per trade (xDAI)"
    elif metric_name == "net earnings":
        column_name = "net_earnings"
        yaxis_title = "Net profit per trade (xDAI)"
    else:  # earnings
        column_name = metric_name
        yaxis_title = "Gross profit per trade (xDAI)"

    # Dead "color_discrete" assignments removed: this plot colors by the
    # (trader_type, market_creator) pair via color_mapping below.
    if trader_filter == "agent":
        trades_filtered = get_boxplot_daily_metrics(
            column_name, trades_df.loc[trades_df["staking"] != "non_agent"]
        )
    elif trader_filter == "non_agent":
        trades_filtered = get_boxplot_daily_metrics(
            column_name, trades_df.loc[trades_df["staking"] == "non_agent"]
        )
    else:
        trades_filtered = get_boxplot_daily_metrics(column_name, trades_df)

    # One color per (trader_type, market_creator) combination, in the same
    # order as "trader_market" in category_orders below.
    color_mapping = [
        "darkviolet",
        "purple",
        "goldenrod",
        "darkgoldenrod",
        "green",
        "darkgreen",
    ]

    # Create binary staking category
    trades_filtered["trader_type"] = trades_filtered["staking"].apply(
        lambda x: "non_agent" if x == "non_agent" else "agent"
    )
    trades_filtered["trader_market"] = trades_filtered.apply(
        lambda x: (x["trader_type"], x["market_creator"]), axis=1
    )
    fig = px.box(
        trades_filtered,
        x="creation_date",
        y=column_name,
        color="trader_market",
        color_discrete_sequence=color_mapping,
        category_orders={
            "market_creator": ["pearl", "quickstart", "all"],
            "trader_market": [
                ("agent", "pearl"),
                ("non_agent", "pearl"),
                ("agent", "quickstart"),
                ("non_agent", "quickstart"),
                ("agent", "all"),
                ("non_agent", "all"),
            ],
        },
        facet_col="market_creator",
    )
    # Show the mean alongside the median inside each box.
    fig.update_traces(boxmean=True)
    fig.update_layout(
        xaxis_title="Day",
        yaxis_title=yaxis_title,
        legend=dict(yanchor="top", y=0.5),
    )
    # Faceting creates one x-axis per facet; title them all consistently.
    for axis in fig.layout:
        if axis.startswith("xaxis"):
            fig.layout[axis].update(title="Day")
    fig.update_xaxes(tickformat="%b %d")
    return gr.Plot(
        value=fig,
    )
146
+
147
+
148
def plot_daily_metrics_v2(
    metric_name: str, trades_df: pd.DataFrame, trader_filter: str = None
) -> gr.Plot:
    """Plot daily box plots of a trade metric, agents vs non-agents side by side.

    :param metric_name: one of "mech calls", "ROI", "collateral amount",
        "net earnings" or "earnings".
    :param trades_df: trades dataframe (needs "staking", "creation_date",
        "creation_timestamp", "market_creator" and the metric column).
    :param trader_filter: unused in this variant; kept for interface parity
        with plot_daily_metrics.
    :return: a gradio Plot with one subplot per trader type.
    """
    # Map the human-readable metric name to the dataframe column and axis label.
    if metric_name == "mech calls":
        column_name = "num_mech_calls"
        yaxis_title = "Nr of mech calls per trade"
    elif metric_name == "ROI":
        column_name = "roi"
        yaxis_title = "ROI (net profit/cost)"
    elif metric_name == "collateral amount":
        column_name = "collateral_amount"
        yaxis_title = "Collateral amount per trade (xDAI)"
    elif metric_name == "net earnings":
        column_name = "net_earnings"
        yaxis_title = "Net profit per trade (xDAI)"
    else:  # earnings
        column_name = metric_name
        yaxis_title = "Gross profit per trade (xDAI)"

    color_discrete = ["purple", "darkgoldenrod", "darkgreen"]
    trades_filtered = get_boxplot_daily_metrics(column_name, trades_df)
    fig = make_subplots(rows=1, cols=2, subplot_titles=("Agent", "Non-Agents"))

    # Left subplot: trades placed by agents.
    agent_trades = trades_filtered[trades_filtered["staking"] != "non_agent"]
    fig.add_trace(
        go.Box(
            x=agent_trades["creation_date"],
            y=agent_trades[column_name],
            name="Trades from agents",
            marker_color=color_discrete[0],
            legendgroup="staking_true",
            showlegend=True,
        ),
        row=1,
        col=1,
    )

    # Right subplot: trades placed by non-agents. The "staking" column holds
    # strings, so the previous comparison with the boolean False never matched
    # and this subplot was always empty.
    non_agent_trades = trades_filtered[trades_filtered["staking"] == "non_agent"]
    fig.add_trace(
        go.Box(
            x=non_agent_trades["creation_date"],
            y=non_agent_trades[column_name],
            name="Trades from non-agents",
            marker_color=color_discrete[1],
            legendgroup="staking_false",
            showlegend=True,
        ),
        row=1,
        col=2,
    )

    # Update layout
    fig.update_layout(
        height=600,
        width=1200,
        title_text=f"Box Plot of {column_name} by Staking Status",
        showlegend=True,
        yaxis_title=yaxis_title,
    )

    # Update y-axes to have the same range
    fig.update_yaxes(matches="y")
    # The figure was previously built but never returned.
    return gr.Plot(value=fig)
tabs/metrics.py CHANGED
@@ -2,7 +2,6 @@ import pandas as pd
2
  import gradio as gr
3
  import plotly.express as px
4
  import gc
5
- import matplotlib.pyplot as plt
6
 
7
  trade_metric_choices = [
8
  "mech calls",
@@ -12,6 +11,11 @@ trade_metric_choices = [
12
  "ROI",
13
  ]
14
 
 
 
 
 
 
15
  tool_metric_choices = {
16
  "Weekly Mean Mech Tool Accuracy as (Accurate Responses/All) %": "win_perc",
17
  "Total Weekly Inaccurate Nr of Mech Tool Responses": "losses",
@@ -20,6 +24,7 @@ tool_metric_choices = {
20
  }
21
 
22
  default_trade_metric = "ROI"
 
23
  default_tool_metric = "Weekly Mean Mech Tool Accuracy as (Accurate Responses/All) %"
24
 
25
  HEIGHT = 600
@@ -142,10 +147,12 @@ def plot_trade_metrics(
142
  column_name = metric_name
143
  yaxis_title = "Gross profit per trade (xDAI)"
144
 
 
145
  if trader_filter == "agent":
146
  trades_filtered = get_boxplot_metrics(
147
  column_name, trades_df.loc[trades_df["staking"] != "non_agent"]
148
  )
 
149
  elif trader_filter == "non_agent":
150
  trades_filtered = get_boxplot_metrics(
151
  column_name, trades_df.loc[trades_df["staking"] == "non_agent"]
@@ -157,7 +164,7 @@ def plot_trade_metrics(
157
  x="month_year_week",
158
  y=column_name,
159
  color="market_creator",
160
- color_discrete_sequence=["purple", "goldenrod", "darkgreen"],
161
  category_orders={"market_creator": ["pearl", "quickstart", "all"]},
162
  )
163
  fig.update_traces(boxmean=True)
 
2
  import gradio as gr
3
  import plotly.express as px
4
  import gc
 
5
 
6
  trade_metric_choices = [
7
  "mech calls",
 
11
  "ROI",
12
  ]
13
 
14
+ trade_daily_metric_choices = [
15
+ "mech calls",
16
+ "collateral amount",
17
+ ]
18
+
19
  tool_metric_choices = {
20
  "Weekly Mean Mech Tool Accuracy as (Accurate Responses/All) %": "win_perc",
21
  "Total Weekly Inaccurate Nr of Mech Tool Responses": "losses",
 
24
  }
25
 
26
  default_trade_metric = "ROI"
27
+ default_daily_metric = "collateral amount"
28
  default_tool_metric = "Weekly Mean Mech Tool Accuracy as (Accurate Responses/All) %"
29
 
30
  HEIGHT = 600
 
147
  column_name = metric_name
148
  yaxis_title = "Gross profit per trade (xDAI)"
149
 
150
+ color_discrete = ["purple", "darkgoldenrod", "darkgreen"]
151
  if trader_filter == "agent":
152
  trades_filtered = get_boxplot_metrics(
153
  column_name, trades_df.loc[trades_df["staking"] != "non_agent"]
154
  )
155
+ color_discrete = ["darkviolet", "goldenrod", "green"]
156
  elif trader_filter == "non_agent":
157
  trades_filtered = get_boxplot_metrics(
158
  column_name, trades_df.loc[trades_df["staking"] == "non_agent"]
 
164
  x="month_year_week",
165
  y=column_name,
166
  color="market_creator",
167
+ color_discrete_sequence=color_discrete,
168
  category_orders={"market_creator": ["pearl", "quickstart", "all"]},
169
  )
170
  fig.update_traces(boxmean=True)
tabs/trades.py CHANGED
@@ -12,6 +12,7 @@ WIDTH = 1100
12
  def prepare_trades(trades_df: pd.DataFrame) -> pd.DataFrame:
13
  """Prepares the trades data for analysis."""
14
  trades_df["creation_timestamp"] = pd.to_datetime(trades_df["creation_timestamp"])
 
15
  trades_df["creation_timestamp"] = trades_df["creation_timestamp"].dt.tz_convert(
16
  "UTC"
17
  )
 
12
  def prepare_trades(trades_df: pd.DataFrame) -> pd.DataFrame:
13
  """Prepares the trades data for analysis."""
14
  trades_df["creation_timestamp"] = pd.to_datetime(trades_df["creation_timestamp"])
15
+ trades_df["creation_date"] = trades_df["creation_timestamp"].dt.date
16
  trades_df["creation_timestamp"] = trades_df["creation_timestamp"].dt.tz_convert(
17
  "UTC"
18
  )