{
    "epoch": 200.0,
    "eval_accuracy": 78.44827586206897,
    "eval_average_metrics": 77.31764307402186,
    "eval_classification_report": "{\"acceptance\": {\"precision\": 0.75, \"recall\": 0.6666666666666666, \"f1-score\": 0.7058823529411765, \"support\": 9.0}, \"accusation\": {\"precision\": 0.9, \"recall\": 0.75, \"f1-score\": 0.8181818181818182, \"support\": 12.0}, \"appreciation\": {\"precision\": 0.6666666666666666, \"recall\": 0.8888888888888888, \"f1-score\": 0.761904761904762, \"support\": 9.0}, \"challenge\": {\"precision\": 0.6923076923076923, \"recall\": 0.8709677419354839, \"f1-score\": 0.7714285714285716, \"support\": 31.0}, \"informing statement\": {\"precision\": 0.8275862068965517, \"recall\": 0.6486486486486487, \"f1-score\": 0.7272727272727273, \"support\": 37.0}, \"question\": {\"precision\": 1.0, \"recall\": 1.0, \"f1-score\": 1.0, \"support\": 1.0}, \"rejection\": {\"precision\": 0.9, \"recall\": 1.0, \"f1-score\": 0.9473684210526316, \"support\": 9.0}, \"request\": {\"precision\": 1.0, \"recall\": 0.875, \"f1-score\": 0.9333333333333333, \"support\": 8.0}, \"accuracy\": 0.7844827586206896, \"macro avg\": {\"precision\": 0.8420700707338638, \"recall\": 0.8375214932674611, \"f1-score\": 0.8331714982643775, \"support\": 116.0}, \"weighted avg\": {\"precision\": 0.7994157596268179, \"recall\": 0.7844827586206896, \"f1-score\": 0.7831433183733814, \"support\": 116.0}}",
    "eval_f1_macro": 74.05968873461136,
    "eval_f1_micro": 78.44827586206897,
    "eval_f1_weighted": 78.31433183733813,
    "eval_loss": 0.5467008948326111,
    "eval_runtime": 1.2934,
    "eval_samples_per_second": 89.689,
    "init_mem_cpu_alloc_delta": -542822400,
    "init_mem_cpu_peaked_delta": 542826496,
    "init_mem_gpu_alloc_delta": 891528192,
    "init_mem_gpu_peaked_delta": 0,
    "peak_memory": 5.305544921875,
    "test_accuracy": 78.44827586206897,
    "test_average_metrics": 77.31764307402186,
    "test_classification_report": "{\"acceptance\": {\"precision\": 0.75, \"recall\": 0.6666666666666666, \"f1-score\": 0.7058823529411765, \"support\": 9.0}, \"accusation\": {\"precision\": 0.9, \"recall\": 0.75, \"f1-score\": 0.8181818181818182, \"support\": 12.0}, \"appreciation\": {\"precision\": 0.6666666666666666, \"recall\": 0.8888888888888888, \"f1-score\": 0.761904761904762, \"support\": 9.0}, \"challenge\": {\"precision\": 0.6923076923076923, \"recall\": 0.8709677419354839, \"f1-score\": 0.7714285714285716, \"support\": 31.0}, \"informing statement\": {\"precision\": 0.8275862068965517, \"recall\": 0.6486486486486487, \"f1-score\": 0.7272727272727273, \"support\": 37.0}, \"question\": {\"precision\": 1.0, \"recall\": 1.0, \"f1-score\": 1.0, \"support\": 1.0}, \"rejection\": {\"precision\": 0.9, \"recall\": 1.0, \"f1-score\": 0.9473684210526316, \"support\": 9.0}, \"request\": {\"precision\": 1.0, \"recall\": 0.875, \"f1-score\": 0.9333333333333333, \"support\": 8.0}, \"accuracy\": 0.7844827586206896, \"macro avg\": {\"precision\": 0.8420700707338638, \"recall\": 0.8375214932674611, \"f1-score\": 0.8331714982643775, \"support\": 116.0}, \"weighted avg\": {\"precision\": 0.7994157596268179, \"recall\": 0.7844827586206896, \"f1-score\": 0.7831433183733814, \"support\": 116.0}}",
    "test_f1_macro": 74.05968873461136,
    "test_f1_micro": 78.44827586206897,
    "test_f1_weighted": 78.31433183733813,
    "test_loss": 0.5467008948326111,
    "test_runtime": 1.2778,
    "test_samples_per_second": 90.778,
    "total_time_in_minutes": 41.508179166666665,
"train_mem_cpu_alloc_delta": 945156096, |
|
"train_mem_cpu_peaked_delta": 40960, |
|
"train_mem_gpu_alloc_delta": 40216576, |
|
"train_mem_gpu_peaked_delta": 4624272384, |
|
"train_runtime": 2488.3254, |
|
"train_samples": 488, |
|
"train_samples_per_second": 1.286 |
|
} |
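
Note that `eval_classification_report` and `test_classification_report` are stored as JSON-encoded strings inside the JSON document, so they need a second decoding pass before the per-class metrics are usable. A minimal sketch, assuming the file above is saved as `all_results.json` (the filename is an assumption; adjust it to the actual artifact path):

```python
import json

# Load the metrics file; "all_results.json" is an assumed filename.
with open("all_results.json") as f:
    results = json.load(f)

# The report is a JSON string inside the JSON document, so it takes a
# second json.loads() to become a dict keyed by class label.
report = json.loads(results["test_classification_report"])

for label, metrics in report.items():
    # "accuracy" maps to a bare float; per-class entries are dicts.
    if isinstance(metrics, dict):
        print(f"{label}: F1={metrics['f1-score']:.3f} (n={int(metrics['support'])})")
```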