riasharma committed
Commit 80bebfc · Parent: 6467453

End of training

README.md CHANGED
@@ -15,14 +15,14 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [microsoft/layoutlm-base-uncased](https://huggingface.co/microsoft/layoutlm-base-uncased) on an unknown dataset.
 It achieves the following results on the evaluation set:
- - Loss: 0.7235
- - Answer: {'precision': 0.6962719298245614, 'recall': 0.7849196538936959, 'f1': 0.73794305636258, 'number': 809}
- - Header: {'precision': 0.27692307692307694, 'recall': 0.3025210084033613, 'f1': 0.2891566265060241, 'number': 119}
- - Question: {'precision': 0.7558039552880481, 'recall': 0.8253521126760563, 'f1': 0.789048473967684, 'number': 1065}
- - Overall Precision: 0.7029
- - Overall Recall: 0.7777
- - Overall F1: 0.7384
- - Overall Accuracy: 0.7998
+ - Loss: 0.6624
+ - Answer: {'precision': 0.7003222341568206, 'recall': 0.8059332509270705, 'f1': 0.7494252873563217, 'number': 809}
+ - Header: {'precision': 0.3148148148148148, 'recall': 0.2857142857142857, 'f1': 0.29955947136563876, 'number': 119}
+ - Question: {'precision': 0.7602441150828247, 'recall': 0.8187793427230047, 'f1': 0.7884267631103073, 'number': 1065}
+ - Overall Precision: 0.7127
+ - Overall Recall: 0.7817
+ - Overall F1: 0.7456
+ - Overall Accuracy: 0.8098
 
 ## Model description
 
@@ -52,23 +52,23 @@ The following hyperparameters were used during training:
 
 ### Training results
 
- | Training Loss | Epoch | Step | Validation Loss | Answer | Header | Question | Overall Precision | Overall Recall | Overall F1 | Overall Accuracy |
- |:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:--------:|:-----------------:|:--------------:|:----------:|:----------------:|
- | 1.7228 | 1.0 | 10 | 1.5183 | {'precision': 0.060676779463243874, 'recall': 0.06427688504326329, 'f1': 0.062424969987995196, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.3057324840764331, 'recall': 0.4056338028169014, 'f1': 0.3486682808716707, 'number': 1065} | 0.2132 | 0.2428 | 0.2271 | 0.4422 |
- | 1.3399 | 2.0 | 20 | 1.1666 | {'precision': 0.27170868347338933, 'recall': 0.23980222496909764, 'f1': 0.25476034143138543, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.45522949586155004, 'recall': 0.568075117370892, 'f1': 0.5054302422723475, 'number': 1065} | 0.3911 | 0.4009 | 0.3959 | 0.6011 |
- | 1.04 | 3.0 | 30 | 0.9328 | {'precision': 0.47839195979899496, 'recall': 0.588380716934487, 'f1': 0.5277161862527716, 'number': 809} | {'precision': 0.06818181818181818, 'recall': 0.025210084033613446, 'f1': 0.03680981595092025, 'number': 119} | {'precision': 0.6219201359388276, 'recall': 0.6873239436619718, 'f1': 0.6529884032114184, 'number': 1065} | 0.5465 | 0.6076 | 0.5754 | 0.7133 |
- | 0.8105 | 4.0 | 40 | 0.7992 | {'precision': 0.5817060637204522, 'recall': 0.6996291718170581, 'f1': 0.6352413019079686, 'number': 809} | {'precision': 0.0963855421686747, 'recall': 0.06722689075630252, 'f1': 0.07920792079207921, 'number': 119} | {'precision': 0.6542904290429042, 'recall': 0.7446009389671362, 'f1': 0.696530522617479, 'number': 1065} | 0.6027 | 0.6859 | 0.6416 | 0.7516 |
- | 0.6523 | 5.0 | 50 | 0.7333 | {'precision': 0.6176470588235294, 'recall': 0.7527812113720643, 'f1': 0.6785515320334262, 'number': 809} | {'precision': 0.20430107526881722, 'recall': 0.15966386554621848, 'f1': 0.1792452830188679, 'number': 119} | {'precision': 0.6836393989983306, 'recall': 0.7690140845070422, 'f1': 0.7238179407865665, 'number': 1065} | 0.6355 | 0.7260 | 0.6778 | 0.7724 |
- | 0.5591 | 6.0 | 60 | 0.7152 | {'precision': 0.6452304394426581, 'recall': 0.7441285537700866, 'f1': 0.6911595866819749, 'number': 809} | {'precision': 0.2222222222222222, 'recall': 0.18487394957983194, 'f1': 0.2018348623853211, 'number': 119} | {'precision': 0.6821086261980831, 'recall': 0.8018779342723005, 'f1': 0.7371601208459214, 'number': 1065} | 0.6471 | 0.7416 | 0.6911 | 0.7845 |
- | 0.494 | 7.0 | 70 | 0.6953 | {'precision': 0.652542372881356, 'recall': 0.761433868974042, 'f1': 0.7027952082144895, 'number': 809} | {'precision': 0.2184873949579832, 'recall': 0.2184873949579832, 'f1': 0.2184873949579832, 'number': 119} | {'precision': 0.7113316790736146, 'recall': 0.8075117370892019, 'f1': 0.7563764291996481, 'number': 1065} | 0.6611 | 0.7536 | 0.7043 | 0.7893 |
- | 0.4345 | 8.0 | 80 | 0.6955 | {'precision': 0.6485042735042735, 'recall': 0.7503090234857849, 'f1': 0.695702005730659, 'number': 809} | {'precision': 0.23809523809523808, 'recall': 0.25210084033613445, 'f1': 0.24489795918367344, 'number': 119} | {'precision': 0.7281632653061224, 'recall': 0.8375586854460094, 'f1': 0.7790393013100436, 'number': 1065} | 0.6686 | 0.7672 | 0.7145 | 0.7936 |
- | 0.3786 | 9.0 | 90 | 0.7151 | {'precision': 0.6762513312034079, 'recall': 0.7849196538936959, 'f1': 0.7265446224256292, 'number': 809} | {'precision': 0.24817518248175183, 'recall': 0.2857142857142857, 'f1': 0.265625, 'number': 119} | {'precision': 0.7582515611061552, 'recall': 0.7981220657276995, 'f1': 0.7776761207685269, 'number': 1065} | 0.6914 | 0.7622 | 0.7251 | 0.7907 |
- | 0.3465 | 10.0 | 100 | 0.7036 | {'precision': 0.6802197802197802, 'recall': 0.765142150803461, 'f1': 0.7201861547411287, 'number': 809} | {'precision': 0.2777777777777778, 'recall': 0.29411764705882354, 'f1': 0.28571428571428575, 'number': 119} | {'precision': 0.7470588235294118, 'recall': 0.8347417840375587, 'f1': 0.7884700665188471, 'number': 1065} | 0.6932 | 0.7742 | 0.7315 | 0.8004 |
- | 0.3289 | 11.0 | 110 | 0.7109 | {'precision': 0.6814734561213435, 'recall': 0.7775030902348579, 'f1': 0.7263279445727483, 'number': 809} | {'precision': 0.2692307692307692, 'recall': 0.29411764705882354, 'f1': 0.28112449799196787, 'number': 119} | {'precision': 0.7449832775919732, 'recall': 0.8366197183098592, 'f1': 0.7881468376824414, 'number': 1065} | 0.6914 | 0.7802 | 0.7331 | 0.7950 |
- | 0.3066 | 12.0 | 120 | 0.7106 | {'precision': 0.6941694169416942, 'recall': 0.7799752781211372, 'f1': 0.7345750873108267, 'number': 809} | {'precision': 0.2868217054263566, 'recall': 0.31092436974789917, 'f1': 0.2983870967741935, 'number': 119} | {'precision': 0.7540425531914894, 'recall': 0.831924882629108, 'f1': 0.7910714285714286, 'number': 1065} | 0.7022 | 0.7797 | 0.7389 | 0.7980 |
- | 0.2914 | 13.0 | 130 | 0.7253 | {'precision': 0.6913849509269356, 'recall': 0.7836835599505563, 'f1': 0.7346465816917729, 'number': 809} | {'precision': 0.2642857142857143, 'recall': 0.31092436974789917, 'f1': 0.28571428571428575, 'number': 119} | {'precision': 0.7402707275803723, 'recall': 0.8215962441314554, 'f1': 0.778816199376947, 'number': 1065} | 0.6905 | 0.7757 | 0.7306 | 0.7956 |
- | 0.2751 | 14.0 | 140 | 0.7191 | {'precision': 0.6818181818181818, 'recall': 0.7787391841779975, 'f1': 0.7270628967109058, 'number': 809} | {'precision': 0.2748091603053435, 'recall': 0.3025210084033613, 'f1': 0.288, 'number': 119} | {'precision': 0.7474489795918368, 'recall': 0.8253521126760563, 'f1': 0.784471218206158, 'number': 1065} | 0.6925 | 0.7752 | 0.7315 | 0.7991 |
- | 0.2769 | 15.0 | 150 | 0.7235 | {'precision': 0.6962719298245614, 'recall': 0.7849196538936959, 'f1': 0.73794305636258, 'number': 809} | {'precision': 0.27692307692307694, 'recall': 0.3025210084033613, 'f1': 0.2891566265060241, 'number': 119} | {'precision': 0.7558039552880481, 'recall': 0.8253521126760563, 'f1': 0.789048473967684, 'number': 1065} | 0.7029 | 0.7777 | 0.7384 | 0.7998 |
+ | Training Loss | Epoch | Step | Validation Loss | Answer | Header | Question | Overall Precision | Overall Recall | Overall F1 | Overall Accuracy |
+ |:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:--------:|:-----------------:|:--------------:|:----------:|:----------------:|
+ | 1.8207 | 1.0 | 10 | 1.6331 | {'precision': 0.01676829268292683, 'recall': 0.013597033374536464, 'f1': 0.015017064846416382, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.21189024390243902, 'recall': 0.13051643192488263, 'f1': 0.16153399186519465, 'number': 1065} | 0.1143 | 0.0753 | 0.0908 | 0.3429 |
+ | 1.4867 | 2.0 | 20 | 1.3144 | {'precision': 0.13937282229965156, 'recall': 0.14833127317676142, 'f1': 0.14371257485029942, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.4092178770949721, 'recall': 0.5502347417840375, 'f1': 0.4693632358830597, 'number': 1065} | 0.3079 | 0.3542 | 0.3294 | 0.5753 |
+ | 1.1706 | 3.0 | 30 | 1.0082 | {'precision': 0.4507042253521127, 'recall': 0.553770086526576, 'f1': 0.4969495285635052, 'number': 809} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 119} | {'precision': 0.5885810243492863, 'recall': 0.6582159624413145, 'f1': 0.6214539007092199, 'number': 1065} | 0.5237 | 0.5765 | 0.5488 | 0.6721 |
+ | 0.8874 | 4.0 | 40 | 0.8115 | {'precision': 0.6029106029106029, 'recall': 0.7169344870210136, 'f1': 0.6549971767363072, 'number': 809} | {'precision': 0.05714285714285714, 'recall': 0.01680672268907563, 'f1': 0.025974025974025972, 'number': 119} | {'precision': 0.649792531120332, 'recall': 0.7352112676056338, 'f1': 0.6898678414096917, 'number': 1065} | 0.6199 | 0.6849 | 0.6508 | 0.7517 |
+ | 0.7072 | 5.0 | 50 | 0.7206 | {'precision': 0.6341948310139165, 'recall': 0.788627935723115, 'f1': 0.7030303030303031, 'number': 809} | {'precision': 0.18032786885245902, 'recall': 0.09243697478991597, 'f1': 0.12222222222222223, 'number': 119} | {'precision': 0.696551724137931, 'recall': 0.7586854460093897, 'f1': 0.7262921348314607, 'number': 1065} | 0.6542 | 0.7311 | 0.6905 | 0.7725 |
+ | 0.5896 | 6.0 | 60 | 0.6813 | {'precision': 0.6571428571428571, 'recall': 0.796044499381953, 'f1': 0.7199552822806037, 'number': 809} | {'precision': 0.1746031746031746, 'recall': 0.09243697478991597, 'f1': 0.12087912087912087, 'number': 119} | {'precision': 0.7217981340118744, 'recall': 0.7990610328638498, 'f1': 0.7584670231729055, 'number': 1065} | 0.6778 | 0.7556 | 0.7146 | 0.7867 |
+ | 0.5193 | 7.0 | 70 | 0.6605 | {'precision': 0.6949516648764769, 'recall': 0.799752781211372, 'f1': 0.7436781609195402, 'number': 809} | {'precision': 0.20618556701030927, 'recall': 0.16806722689075632, 'f1': 0.1851851851851852, 'number': 119} | {'precision': 0.734468085106383, 'recall': 0.8103286384976526, 'f1': 0.7705357142857142, 'number': 1065} | 0.6945 | 0.7677 | 0.7293 | 0.7979 |
+ | 0.4591 | 8.0 | 80 | 0.6473 | {'precision': 0.6922246220302376, 'recall': 0.792336217552534, 'f1': 0.7389048991354467, 'number': 809} | {'precision': 0.24, 'recall': 0.20168067226890757, 'f1': 0.2191780821917808, 'number': 119} | {'precision': 0.7382154882154882, 'recall': 0.8234741784037559, 'f1': 0.7785175321793164, 'number': 1065} | 0.6965 | 0.7737 | 0.7331 | 0.8059 |
+ | 0.3939 | 9.0 | 90 | 0.6369 | {'precision': 0.6886291179596175, 'recall': 0.8009888751545118, 'f1': 0.7405714285714285, 'number': 809} | {'precision': 0.2777777777777778, 'recall': 0.25210084033613445, 'f1': 0.2643171806167401, 'number': 119} | {'precision': 0.7515047291487532, 'recall': 0.8206572769953052, 'f1': 0.784560143626571, 'number': 1065} | 0.7016 | 0.7787 | 0.7382 | 0.8088 |
+ | 0.3604 | 10.0 | 100 | 0.6514 | {'precision': 0.6954643628509719, 'recall': 0.796044499381953, 'f1': 0.7423631123919308, 'number': 809} | {'precision': 0.29, 'recall': 0.24369747899159663, 'f1': 0.2648401826484018, 'number': 119} | {'precision': 0.7665505226480837, 'recall': 0.8262910798122066, 'f1': 0.7953004970628107, 'number': 1065} | 0.7144 | 0.7792 | 0.7454 | 0.8125 |
+ | 0.3344 | 11.0 | 110 | 0.6505 | {'precision': 0.7031419284940412, 'recall': 0.8022249690976514, 'f1': 0.7494226327944574, 'number': 809} | {'precision': 0.3153153153153153, 'recall': 0.29411764705882354, 'f1': 0.30434782608695654, 'number': 119} | {'precision': 0.755632582322357, 'recall': 0.8187793427230047, 'f1': 0.7859396124380351, 'number': 1065} | 0.7112 | 0.7807 | 0.7443 | 0.8087 |
+ | 0.3144 | 12.0 | 120 | 0.6461 | {'precision': 0.6973262032085561, 'recall': 0.8059332509270705, 'f1': 0.7477064220183487, 'number': 809} | {'precision': 0.3119266055045872, 'recall': 0.2857142857142857, 'f1': 0.2982456140350877, 'number': 119} | {'precision': 0.7590051457975986, 'recall': 0.8309859154929577, 'f1': 0.7933662034961901, 'number': 1065} | 0.7109 | 0.7883 | 0.7476 | 0.8137 |
+ | 0.2976 | 13.0 | 130 | 0.6569 | {'precision': 0.6925531914893617, 'recall': 0.8046971569839307, 'f1': 0.7444253859348199, 'number': 809} | {'precision': 0.3025210084033613, 'recall': 0.3025210084033613, 'f1': 0.3025210084033613, 'number': 119} | {'precision': 0.7586805555555556, 'recall': 0.8206572769953052, 'f1': 0.7884528642309426, 'number': 1065} | 0.7060 | 0.7832 | 0.7426 | 0.8094 |
+ | 0.2876 | 14.0 | 140 | 0.6629 | {'precision': 0.7034632034632035, 'recall': 0.8034610630407911, 'f1': 0.7501442585112521, 'number': 809} | {'precision': 0.3148148148148148, 'recall': 0.2857142857142857, 'f1': 0.29955947136563876, 'number': 119} | {'precision': 0.7657894736842106, 'recall': 0.819718309859155, 'f1': 0.7918367346938776, 'number': 1065} | 0.7169 | 0.7812 | 0.7477 | 0.8104 |
+ | 0.2877 | 15.0 | 150 | 0.6624 | {'precision': 0.7003222341568206, 'recall': 0.8059332509270705, 'f1': 0.7494252873563217, 'number': 809} | {'precision': 0.3148148148148148, 'recall': 0.2857142857142857, 'f1': 0.29955947136563876, 'number': 119} | {'precision': 0.7602441150828247, 'recall': 0.8187793427230047, 'f1': 0.7884267631103073, 'number': 1065} | 0.7127 | 0.7817 | 0.7456 | 0.8098 |
 
 
 ### Framework versions
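The card diff above reports span metrics for Answer, Header and Question entities, i.e. a LayoutLM token-classification fine-tune (FUNSD-style form understanding). As a minimal, hedged usage sketch only: the repo id, words and bounding boxes below are hypothetical placeholders and are not taken from this commit.

```python
# Minimal sketch of running inference with a LayoutLM token-classification checkpoint.
# The repo id and inputs are placeholders; LayoutLM expects one box per token,
# normalized to a 0-1000 coordinate grid.
import torch
from transformers import AutoTokenizer, LayoutLMForTokenClassification

repo_id = "riasharma/layoutlm-funsd"  # hypothetical; substitute the actual checkpoint

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = LayoutLMForTokenClassification.from_pretrained(repo_id)

words = ["Date:", "2023-12-28"]
word_boxes = [[70, 60, 180, 90], [200, 60, 360, 90]]

enc = tokenizer(words, is_split_into_words=True, return_tensors="pt")
# Map each subword back to its word's box; special tokens get a zero box.
bbox = [[0, 0, 0, 0] if i is None else word_boxes[i] for i in enc.word_ids(0)]
enc["bbox"] = torch.tensor([bbox])

with torch.no_grad():
    logits = model(**enc).logits
print([model.config.id2label[p] for p in logits.argmax(-1).squeeze(0).tolist()])
```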
logs/events.out.tfevents.1703748079.dlmachine2.188101.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:715815c0439ac373b54741740633177845ccf50a06c1b89d9a717408627038f3
- size 13005
+ oid sha256:65019d1b0ae5a6590022ceb7040d106ccd71f8ec2c70b2d4389deb12c20e65a2
+ size 14681
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:95deaac3e5313b8d27abfd1fc8f3839b7569232f910f7086a5e1a411d6a46cef
+ oid sha256:56b2ba0e41fa368d458fa43906093b2036dcbf194a390298018d030961180609
 size 450558212
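The weights are tracked with Git LFS, so the committed file is just a pointer whose oid is the SHA-256 of the real model.safetensors. A minimal sketch, assuming the file has already been downloaded to the working directory, of checking a local copy against the new pointer:

```python
# Minimal sketch: hash a locally downloaded model.safetensors and compare it
# with the SHA-256 oid recorded in the Git LFS pointer above. The local path
# is an assumption, not something stated in the commit.
import hashlib

EXPECTED_OID = "56b2ba0e41fa368d458fa43906093b2036dcbf194a390298018d030961180609"

sha = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

print("match" if sha.hexdigest() == EXPECTED_OID else "mismatch")
```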
tokenizer.json CHANGED
@@ -1,21 +1,7 @@
 {
   "version": "1.0",
- "truncation": {
-   "direction": "Right",
-   "max_length": 512,
-   "strategy": "LongestFirst",
-   "stride": 0
- },
- "padding": {
-   "strategy": {
-     "Fixed": 512
-   },
-   "direction": "Right",
-   "pad_to_multiple_of": null,
-   "pad_id": 0,
-   "pad_type_id": 0,
-   "pad_token": "[PAD]"
- },
+ "truncation": null,
+ "padding": null,
  "added_tokens": [
    {
      "id": 0,