aliasgerovs committed on
Commit d22f052 · 1 Parent(s): 9ea96b4
Files changed (5)
  1. .gitignore +1 -1
  2. highlighter.py +1 -1
  3. nohup.out +767 -177
  4. predictors.py +12 -4
  5. requirements.txt +2 -1
.gitignore CHANGED
@@ -1,3 +1,3 @@
  __pycache__/
- copy_ch/
+ venv/
  copy_check/
highlighter.py CHANGED
@@ -14,7 +14,7 @@ def explainer(text, model_type):
  sentences = [sent for sent in sent_tokenize(text)]
  num_sentences = len(sentences)
  exp = explainer_.explain_instance(
- text, predictor_wrapper, num_features=num_sentences, num_samples=500
+ text, predictor_wrapper, num_features=num_sentences, num_samples=2000
  )
  weights_mapping = exp.as_map()[1]
  sentences_weights = {sentence: 0 for sentence in sentences}
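For context, a minimal sketch of the pattern this hunk changes, assuming a `LimeTextExplainer` named `explainer_` and the `predictor_wrapper` classifier function from the file (the class names here are placeholders, not the app's real labels). Raising `num_samples` from 500 to 2000 makes LIME score four times as many perturbed copies of the input text, which stabilizes the per-sentence weights but also multiplies the prediction workload — visible as GPU memory pressure in the nohup.out log below:

```python
from lime.lime_text import LimeTextExplainer
from nltk.tokenize import sent_tokenize

# Placeholder class names; the real labels live in highlighter.py's setup.
explainer_ = LimeTextExplainer(class_names=["negative", "positive"])

def explain(text, predictor_wrapper):
    sentences = sent_tokenize(text)
    # num_samples: how many perturbed variants of `text` LIME feeds to
    # predictor_wrapper. This commit raises it from 500 to 2000.
    exp = explainer_.explain_instance(
        text,
        predictor_wrapper,
        num_features=len(sentences),
        num_samples=2000,
    )
    # as_map()[1]: (feature_index, weight) pairs for class 1.
    return exp.as_map()[1]
```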
nohup.out CHANGED
@@ -1,221 +1,811 @@
- 2024-02-27 18:59:27.464755: I external/local_tsl/tsl/cuda/cudart_stub.cc:31] Could not find cuda drivers on your machine, GPU will not be used.
- 2024-02-27 18:59:27.511609: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
- 2024-02-27 18:59:27.511670: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
- 2024-02-27 18:59:27.513106: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
- 2024-02-27 18:59:27.520759: I external/local_tsl/tsl/cuda/cudart_stub.cc:31] Could not find cuda drivers on your machine, GPU will not be used.
- 2024-02-27 18:59:27.521010: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
  To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
-
-
-
-
-
-
  Traceback (most recent call last):
- File "/home/aliasgarov/copyright_checker/app.py", line 197, in <module>
- quillbot_model = AutoModelForSequenceClassification.from_pretrained("polygraf-ai/quillbot-detector-roberta-base-28K").to(device)
- File "/usr/local/lib/python3.9/dist-packages/transformers/models/auto/auto_factory.py", line 463, in from_pretrained
- return model_class.from_pretrained(
- File "/usr/local/lib/python3.9/dist-packages/transformers/modeling_utils.py", line 2182, in from_pretrained
- raise EnvironmentError(
- OSError: polygraf-ai/quillbot-detector-roberta-base-28K does not appear to have a file named pytorch_model.bin, tf_model.h5, model.ckpt or flax_model.msgpack.
- 2024-03-27 11:14:47.090743: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
- 2024-03-27 11:14:47.090847: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
- 2024-03-27 11:14:47.092787: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
- 2024-03-27 11:14:47.101435: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
  To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
- 2024-03-27 11:14:48.367455: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package stopwords to /root/nltk_data...
  [nltk_data] Package stopwords is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package stopwords to /root/nltk_data...
  [nltk_data] Package stopwords is already up-to-date!
- error: externally-managed-environment
-
- × This environment is externally managed
- ╰─> To install Python packages system-wide, try apt install
- python3-xyz, where xyz is the package you are trying to
- install.
-
- If you wish to install a non-Debian-packaged Python package,
- create a virtual environment using python3 -m venv path/to/venv.
- Then use path/to/venv/bin/python and path/to/venv/bin/pip. Make
- sure you have python3-full installed.
-
- If you wish to install a non-Debian packaged Python application,
- it may be easiest to use pipx install xyz, which will manage a
- virtual environment for you. Make sure you have pipx installed.
-
- See /usr/share/doc/python3.11/README.venv for more information.
-
- note: If you believe this is a mistake, please contact your Python installation or OS distribution provider. You can override this, at the risk of breaking your Python installation or OS, by passing --break-system-packages.
- hint: See PEP 668 for the detailed specification.
- 2024-03-27 11:38:14.074965: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
- 2024-03-27 11:38:14.075038: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
- 2024-03-27 11:38:14.076732: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
- 2024-03-27 11:38:14.085182: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
  To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
- 2024-03-27 11:38:15.346441: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package stopwords to /root/nltk_data...
  [nltk_data] Package stopwords is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package stopwords to /root/nltk_data...
  [nltk_data] Package stopwords is already up-to-date!
- error: externally-managed-environment
-
- × This environment is externally managed
- ╰─> To install Python packages system-wide, try apt install
- python3-xyz, where xyz is the package you are trying to
- install.
-
- If you wish to install a non-Debian-packaged Python package,
- create a virtual environment using python3 -m venv path/to/venv.
- Then use path/to/venv/bin/python and path/to/venv/bin/pip. Make
- sure you have python3-full installed.
-
- If you wish to install a non-Debian packaged Python application,
- it may be easiest to use pipx install xyz, which will manage a
- virtual environment for you. Make sure you have pipx installed.
-
- See /usr/share/doc/python3.11/README.venv for more information.
-
- note: If you believe this is a mistake, please contact your Python installation or OS distribution provider. You can override this, at the risk of breaking your Python installation or OS, by passing --break-system-packages.
- hint: See PEP 668 for the detailed specification.
- 2024-03-27 15:11:04.526493: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
- 2024-03-27 15:11:04.526578: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
- 2024-03-27 15:11:04.528324: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
- 2024-03-27 15:11:04.536839: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
  To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
- 2024-03-27 15:11:05.847612: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package stopwords to /root/nltk_data...
  [nltk_data] Package stopwords is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package stopwords to /root/nltk_data...
  [nltk_data] Package stopwords is already up-to-date!
- error: externally-managed-environment
-
- × This environment is externally managed
- ╰─> To install Python packages system-wide, try apt install
- python3-xyz, where xyz is the package you are trying to
- install.
-
- If you wish to install a non-Debian-packaged Python package,
- create a virtual environment using python3 -m venv path/to/venv.
- Then use path/to/venv/bin/python and path/to/venv/bin/pip. Make
- sure you have python3-full installed.
-
- If you wish to install a non-Debian packaged Python application,
- it may be easiest to use pipx install xyz, which will manage a
- virtual environment for you. Make sure you have pipx installed.
-
- See /usr/share/doc/python3.11/README.venv for more information.
-
- note: If you believe this is a mistake, please contact your Python installation or OS distribution provider. You can override this, at the risk of breaking your Python installation or OS, by passing --break-system-packages.
- hint: See PEP 668 for the detailed specification.
- /home/aliasgarov/copyright_checker/predictors.py:197: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
  probas = F.softmax(tensor_logits).detach().cpu().numpy()
- /home/aliasgarov/copyright_checker/predictors.py:197: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
  probas = F.softmax(tensor_logits).detach().cpu().numpy()
- /home/aliasgarov/copyright_checker/predictors.py:197: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
  probas = F.softmax(tensor_logits).detach().cpu().numpy()
- 2024-03-28 08:55:08.043046: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
- 2024-03-28 08:55:08.043133: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
- 2024-03-28 08:55:08.044949: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
- 2024-03-28 08:55:08.052957: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
  To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
- 2024-03-28 08:55:09.325222: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package stopwords to /root/nltk_data...
  [nltk_data] Package stopwords is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package stopwords to /root/nltk_data...
  [nltk_data] Package stopwords is already up-to-date!
- error: externally-managed-environment
-
- × This environment is externally managed
- ╰─> To install Python packages system-wide, try apt install
- python3-xyz, where xyz is the package you are trying to
- install.
-
- If you wish to install a non-Debian-packaged Python package,
- create a virtual environment using python3 -m venv path/to/venv.
- Then use path/to/venv/bin/python and path/to/venv/bin/pip. Make
- sure you have python3-full installed.
-
- If you wish to install a non-Debian packaged Python application,
- it may be easiest to use pipx install xyz, which will manage a
- virtual environment for you. Make sure you have pipx installed.
-
- See /usr/share/doc/python3.11/README.venv for more information.

- note: If you believe this is a mistake, please contact your Python installation or OS distribution provider. You can override this, at the risk of breaking your Python installation or OS, by passing --break-system-packages.
- hint: See PEP 668 for the detailed specification.
- 2024-03-28 08:58:12.079411: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
- 2024-03-28 08:58:12.079483: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
- 2024-03-28 08:58:12.081096: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
- 2024-03-28 08:58:12.089611: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
  To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
- 2024-03-28 08:58:13.377472: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package stopwords to /root/nltk_data...
  [nltk_data] Package stopwords is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package punkt to /root/nltk_data...
  [nltk_data] Package punkt is already up-to-date!
- [nltk_data] Downloading package stopwords to /root/nltk_data...
  [nltk_data] Package stopwords is already up-to-date!
- error: externally-managed-environment
-
- × This environment is externally managed
- ╰─> To install Python packages system-wide, try apt install
- python3-xyz, where xyz is the package you are trying to
- install.
-
- If you wish to install a non-Debian-packaged Python package,
- create a virtual environment using python3 -m venv path/to/venv.
- Then use path/to/venv/bin/python and path/to/venv/bin/pip. Make
- sure you have python3-full installed.
-
- If you wish to install a non-Debian packaged Python application,
- it may be easiest to use pipx install xyz, which will manage a
- virtual environment for you. Make sure you have pipx installed.
-
- See /usr/share/doc/python3.11/README.venv for more information.

- note: If you believe this is a mistake, please contact your Python installation or OS distribution provider. You can override this, at the risk of breaking your Python installation or OS, by passing --break-system-packages.
- hint: See PEP 668 for the detailed specification.
1
+ 2024-03-29 14:20:49.463961: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
 
 
 
 
 
2
  To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
3
+ 2024-03-29 14:20:50.507795: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
4
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
5
  [nltk_data] Package punkt is already up-to-date!
6
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
7
  [nltk_data] Package punkt is already up-to-date!
8
+ [nltk_data] Downloading package stopwords to
9
+ [nltk_data] /home/aliasgarov/nltk_data...
10
+ [nltk_data] Package stopwords is already up-to-date!
11
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
12
+ [nltk_data] Package punkt is already up-to-date!
13
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
14
+ [nltk_data] Package punkt is already up-to-date!
15
+ [nltk_data] Downloading package stopwords to
16
+ [nltk_data] /home/aliasgarov/nltk_data...
17
+ [nltk_data] Package stopwords is already up-to-date!
18
+ /usr/bin/python3: No module named spacy
19
  Traceback (most recent call last):
20
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/queueing.py", line 522, in process_events
21
+ response = await route_utils.call_process_api(
22
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/route_utils.py", line 260, in call_process_api
23
+ output = await app.get_blocks().process_api(
24
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/blocks.py", line 1689, in process_api
25
+ result = await self.call_function(
26
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/blocks.py", line 1255, in call_function
27
+ prediction = await anyio.to_thread.run_sync(
28
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
29
+ return await get_async_backend().run_sync_in_worker_thread(
30
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2144, in run_sync_in_worker_thread
31
+ return await future
32
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 851, in run
33
+ result = context.run(func, *args)
34
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/utils.py", line 750, in wrapper
35
+ response = f(*args, **kwargs)
36
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 31, in analyze_and_highlight
37
+ sentences_weights, _ = explainer(text, model_type)
38
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 16, in explainer
39
+ exp = explainer_.explain_instance(
40
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/lime/lime_text.py", line 413, in explain_instance
41
+ data, yss, distances = self.__data_labels_distances(
42
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/lime/lime_text.py", line 482, in __data_labels_distances
43
+ labels = classifier_fn(inverse_data)
44
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 8, in predictor_wrapper
45
+ return predict_for_explainanility(text=text, model_type=model_type)
46
+ File "/home/aliasgarov/copyright_checker/predictors.py", line 195, in predict_for_explainanility
47
+ outputs = model(**tokenized_text)
48
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
49
+ return self._call_impl(*args, **kwargs)
50
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
51
+ return forward_call(*args, **kwargs)
52
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 1564, in forward
53
+ outputs = self.bert(
54
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
55
+ return self._call_impl(*args, **kwargs)
56
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
57
+ return forward_call(*args, **kwargs)
58
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 1013, in forward
59
+ encoder_outputs = self.encoder(
60
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
61
+ return self._call_impl(*args, **kwargs)
62
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
63
+ return forward_call(*args, **kwargs)
64
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 607, in forward
65
+ layer_outputs = layer_module(
66
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
67
+ return self._call_impl(*args, **kwargs)
68
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
69
+ return forward_call(*args, **kwargs)
70
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 497, in forward
71
+ self_attention_outputs = self.attention(
72
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
73
+ return self._call_impl(*args, **kwargs)
74
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
75
+ return forward_call(*args, **kwargs)
76
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 427, in forward
77
+ self_outputs = self.self(
78
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
79
+ return self._call_impl(*args, **kwargs)
80
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
81
+ return forward_call(*args, **kwargs)
82
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 325, in forward
83
+ attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
84
+ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 5.86 GiB. GPU 0 has a total capacity of 14.58 GiB of which 1.76 GiB is free. Including non-PyTorch memory, this process has 12.81 GiB memory in use. Of the allocated memory 11.71 GiB is allocated by PyTorch, and 1008.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
85
+ 2024-03-29 14:31:17.459384: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
86
  To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
87
+ 2024-03-29 14:31:18.518981: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
88
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
89
  [nltk_data] Package punkt is already up-to-date!
90
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
91
  [nltk_data] Package punkt is already up-to-date!
92
+ [nltk_data] Downloading package stopwords to
93
+ [nltk_data] /home/aliasgarov/nltk_data...
94
  [nltk_data] Package stopwords is already up-to-date!
95
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
96
  [nltk_data] Package punkt is already up-to-date!
97
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
98
  [nltk_data] Package punkt is already up-to-date!
99
+ [nltk_data] Downloading package stopwords to
100
+ [nltk_data] /home/aliasgarov/nltk_data...
101
  [nltk_data] Package stopwords is already up-to-date!
102
+ /usr/bin/python3: No module named spacy
103
+ Traceback (most recent call last):
104
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/queueing.py", line 522, in process_events
105
+ response = await route_utils.call_process_api(
106
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/route_utils.py", line 260, in call_process_api
107
+ output = await app.get_blocks().process_api(
108
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/blocks.py", line 1689, in process_api
109
+ result = await self.call_function(
110
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/blocks.py", line 1255, in call_function
111
+ prediction = await anyio.to_thread.run_sync(
112
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
113
+ return await get_async_backend().run_sync_in_worker_thread(
114
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2144, in run_sync_in_worker_thread
115
+ return await future
116
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 851, in run
117
+ result = context.run(func, *args)
118
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/utils.py", line 750, in wrapper
119
+ response = f(*args, **kwargs)
120
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 31, in analyze_and_highlight
121
+ sentences_weights, _ = explainer(text, model_type)
122
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 16, in explainer
123
+ exp = explainer_.explain_instance(
124
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/lime/lime_text.py", line 413, in explain_instance
125
+ data, yss, distances = self.__data_labels_distances(
126
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/lime/lime_text.py", line 482, in __data_labels_distances
127
+ labels = classifier_fn(inverse_data)
128
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 8, in predictor_wrapper
129
+ return predict_for_explainanility(text=text, model_type=model_type)
130
+ File "/home/aliasgarov/copyright_checker/predictors.py", line 195, in predict_for_explainanility
131
+ outputs = model(**tokenized_text)
132
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
133
+ return self._call_impl(*args, **kwargs)
134
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
135
+ return forward_call(*args, **kwargs)
136
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 1564, in forward
137
+ outputs = self.bert(
138
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
139
+ return self._call_impl(*args, **kwargs)
140
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
141
+ return forward_call(*args, **kwargs)
142
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 1013, in forward
143
+ encoder_outputs = self.encoder(
144
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
145
+ return self._call_impl(*args, **kwargs)
146
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
147
+ return forward_call(*args, **kwargs)
148
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 607, in forward
149
+ layer_outputs = layer_module(
150
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
151
+ return self._call_impl(*args, **kwargs)
152
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
153
+ return forward_call(*args, **kwargs)
154
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 497, in forward
155
+ self_attention_outputs = self.attention(
156
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
157
+ return self._call_impl(*args, **kwargs)
158
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
159
+ return forward_call(*args, **kwargs)
160
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 427, in forward
161
+ self_outputs = self.self(
162
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
163
+ return self._call_impl(*args, **kwargs)
164
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
165
+ return forward_call(*args, **kwargs)
166
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 325, in forward
167
+ attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
168
+ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 5.86 GiB. GPU 0 has a total capacity of 14.58 GiB of which 2.47 GiB is free. Including non-PyTorch memory, this process has 12.10 GiB memory in use. Of the allocated memory 11.71 GiB is allocated by PyTorch, and 278.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
169
+ 2024-03-29 14:36:15.933048: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
170
  To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
171
+ 2024-03-29 14:36:16.966744: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
172
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
173
  [nltk_data] Package punkt is already up-to-date!
174
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
175
  [nltk_data] Package punkt is already up-to-date!
176
+ [nltk_data] Downloading package stopwords to
177
+ [nltk_data] /home/aliasgarov/nltk_data...
178
  [nltk_data] Package stopwords is already up-to-date!
179
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
180
  [nltk_data] Package punkt is already up-to-date!
181
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
182
  [nltk_data] Package punkt is already up-to-date!
183
+ [nltk_data] Downloading package stopwords to
184
+ [nltk_data] /home/aliasgarov/nltk_data...
185
  [nltk_data] Package stopwords is already up-to-date!
186
+ /usr/bin/python3: No module named spacy
187
+ Traceback (most recent call last):
188
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/queueing.py", line 522, in process_events
189
+ response = await route_utils.call_process_api(
190
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/route_utils.py", line 260, in call_process_api
191
+ output = await app.get_blocks().process_api(
192
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/blocks.py", line 1689, in process_api
193
+ result = await self.call_function(
194
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/blocks.py", line 1255, in call_function
195
+ prediction = await anyio.to_thread.run_sync(
196
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
197
+ return await get_async_backend().run_sync_in_worker_thread(
198
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2144, in run_sync_in_worker_thread
199
+ return await future
200
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 851, in run
201
+ result = context.run(func, *args)
202
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/utils.py", line 750, in wrapper
203
+ response = f(*args, **kwargs)
204
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 31, in analyze_and_highlight
205
+ sentences_weights, _ = explainer(text, model_type)
206
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 16, in explainer
207
+ exp = explainer_.explain_instance(
208
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/lime/lime_text.py", line 413, in explain_instance
209
+ data, yss, distances = self.__data_labels_distances(
210
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/lime/lime_text.py", line 482, in __data_labels_distances
211
+ labels = classifier_fn(inverse_data)
212
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 8, in predictor_wrapper
213
+ return predict_for_explainanility(text=text, model_type=model_type)
214
+ File "/home/aliasgarov/copyright_checker/predictors.py", line 195, in predict_for_explainanility
215
+ outputs = model(**tokenized_text)
216
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
217
+ return self._call_impl(*args, **kwargs)
218
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
219
+ return forward_call(*args, **kwargs)
220
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 1564, in forward
221
+ outputs = self.bert(
222
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
223
+ return self._call_impl(*args, **kwargs)
224
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
225
+ return forward_call(*args, **kwargs)
226
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 1013, in forward
227
+ encoder_outputs = self.encoder(
228
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
229
+ return self._call_impl(*args, **kwargs)
230
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
231
+ return forward_call(*args, **kwargs)
232
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 607, in forward
233
+ layer_outputs = layer_module(
234
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
235
+ return self._call_impl(*args, **kwargs)
236
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
237
+ return forward_call(*args, **kwargs)
238
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 497, in forward
239
+ self_attention_outputs = self.attention(
240
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
241
+ return self._call_impl(*args, **kwargs)
242
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
243
+ return forward_call(*args, **kwargs)
244
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 427, in forward
245
+ self_outputs = self.self(
246
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
247
+ return self._call_impl(*args, **kwargs)
248
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
249
+ return forward_call(*args, **kwargs)
250
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 325, in forward
251
+ attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
252
+ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 5.86 GiB. GPU 0 has a total capacity of 14.58 GiB of which 5.63 GiB is free. Including non-PyTorch memory, this process has 8.95 GiB memory in use. Of the allocated memory 8.59 GiB is allocated by PyTorch, and 234.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
253
+ 2024-03-29 14:38:49.739939: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
254
  To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
255
+ 2024-03-29 14:38:50.770137: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
256
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
257
  [nltk_data] Package punkt is already up-to-date!
258
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
259
  [nltk_data] Package punkt is already up-to-date!
260
+ [nltk_data] Downloading package stopwords to
261
+ [nltk_data] /home/aliasgarov/nltk_data...
262
  [nltk_data] Package stopwords is already up-to-date!
263
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
264
  [nltk_data] Package punkt is already up-to-date!
265
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
266
  [nltk_data] Package punkt is already up-to-date!
267
+ [nltk_data] Downloading package stopwords to
268
+ [nltk_data] /home/aliasgarov/nltk_data...
269
  [nltk_data] Package stopwords is already up-to-date!
270
+ /usr/bin/python3: No module named spacy
271
+ Traceback (most recent call last):
272
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/queueing.py", line 522, in process_events
273
+ response = await route_utils.call_process_api(
274
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/route_utils.py", line 260, in call_process_api
275
+ output = await app.get_blocks().process_api(
276
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/blocks.py", line 1689, in process_api
277
+ result = await self.call_function(
278
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/blocks.py", line 1255, in call_function
279
+ prediction = await anyio.to_thread.run_sync(
280
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
281
+ return await get_async_backend().run_sync_in_worker_thread(
282
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2144, in run_sync_in_worker_thread
283
+ return await future
284
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 851, in run
285
+ result = context.run(func, *args)
286
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/utils.py", line 750, in wrapper
287
+ response = f(*args, **kwargs)
288
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 31, in analyze_and_highlight
289
+ sentences_weights, _ = explainer(text, model_type)
290
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 16, in explainer
291
+ exp = explainer_.explain_instance(
292
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/lime/lime_text.py", line 413, in explain_instance
293
+ data, yss, distances = self.__data_labels_distances(
294
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/lime/lime_text.py", line 482, in __data_labels_distances
295
+ labels = classifier_fn(inverse_data)
296
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 8, in predictor_wrapper
297
+ return predict_for_explainanility(text=text, model_type=model_type)
298
+ File "/home/aliasgarov/copyright_checker/predictors.py", line 195, in predict_for_explainanility
299
+ outputs = model(**tokenized_text)
300
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
301
+ return self._call_impl(*args, **kwargs)
302
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
303
+ return forward_call(*args, **kwargs)
304
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 1564, in forward
305
+ outputs = self.bert(
306
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
307
+ return self._call_impl(*args, **kwargs)
308
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
309
+ return forward_call(*args, **kwargs)
310
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 1013, in forward
311
+ encoder_outputs = self.encoder(
312
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
313
+ return self._call_impl(*args, **kwargs)
314
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
315
+ return forward_call(*args, **kwargs)
316
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 607, in forward
317
+ layer_outputs = layer_module(
318
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
319
+ return self._call_impl(*args, **kwargs)
320
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
321
+ return forward_call(*args, **kwargs)
322
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 497, in forward
323
+ self_attention_outputs = self.attention(
324
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
325
+ return self._call_impl(*args, **kwargs)
326
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
327
+ return forward_call(*args, **kwargs)
328
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 427, in forward
329
+ self_outputs = self.self(
330
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
331
+ return self._call_impl(*args, **kwargs)
332
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
333
+ return forward_call(*args, **kwargs)
334
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 365, in forward
335
+ context_layer = torch.matmul(attention_probs, value_layer)
336
+ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 500.00 MiB. GPU 0 has a total capacity of 14.58 GiB of which 285.56 MiB is free. Including non-PyTorch memory, this process has 14.30 GiB memory in use. Of the allocated memory 13.96 GiB is allocated by PyTorch, and 222.09 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
337
+ 2024-03-29 14:42:21.299532: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
338
+ To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
339
+ 2024-03-29 14:42:22.362964: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
340
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
341
+ [nltk_data] Package punkt is already up-to-date!
342
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
343
+ [nltk_data] Package punkt is already up-to-date!
344
+ [nltk_data] Downloading package stopwords to
345
+ [nltk_data] /home/aliasgarov/nltk_data...
346
+ [nltk_data] Package stopwords is already up-to-date!
347
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
348
+ [nltk_data] Package punkt is already up-to-date!
349
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
350
+ [nltk_data] Package punkt is already up-to-date!
351
+ [nltk_data] Downloading package stopwords to
352
+ [nltk_data] /home/aliasgarov/nltk_data...
353
+ [nltk_data] Package stopwords is already up-to-date!
354
+ /usr/bin/python3: No module named spacy
355
+ Traceback (most recent call last):
356
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/queueing.py", line 522, in process_events
357
+ response = await route_utils.call_process_api(
358
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/route_utils.py", line 260, in call_process_api
359
+ output = await app.get_blocks().process_api(
360
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/blocks.py", line 1689, in process_api
361
+ result = await self.call_function(
362
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/blocks.py", line 1255, in call_function
363
+ prediction = await anyio.to_thread.run_sync(
364
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
365
+ return await get_async_backend().run_sync_in_worker_thread(
366
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2144, in run_sync_in_worker_thread
367
+ return await future
368
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 851, in run
369
+ result = context.run(func, *args)
370
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/utils.py", line 750, in wrapper
371
+ response = f(*args, **kwargs)
372
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 31, in analyze_and_highlight
373
+ sentences_weights, _ = explainer(text, model_type)
374
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 16, in explainer
375
+ exp = explainer_.explain_instance(
376
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/lime/lime_text.py", line 413, in explain_instance
377
+ data, yss, distances = self.__data_labels_distances(
378
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/lime/lime_text.py", line 482, in __data_labels_distances
379
+ labels = classifier_fn(inverse_data)
380
+ File "/home/aliasgarov/copyright_checker/highlighter.py", line 8, in predictor_wrapper
381
+ return predict_for_explainanility(text=text, model_type=model_type)
382
+ File "/home/aliasgarov/copyright_checker/predictors.py", line 195, in predict_for_explainanility
383
+ outputs = model(**tokenized_text)
384
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
385
+ return self._call_impl(*args, **kwargs)
386
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
387
+ return forward_call(*args, **kwargs)
388
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 1564, in forward
389
+ outputs = self.bert(
390
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
391
+ return self._call_impl(*args, **kwargs)
392
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
393
+ return forward_call(*args, **kwargs)
394
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 1013, in forward
395
+ encoder_outputs = self.encoder(
396
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
397
+ return self._call_impl(*args, **kwargs)
398
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
399
+ return forward_call(*args, **kwargs)
400
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 607, in forward
401
+ layer_outputs = layer_module(
402
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
403
+ return self._call_impl(*args, **kwargs)
404
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
405
+ return forward_call(*args, **kwargs)
406
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 497, in forward
407
+ self_attention_outputs = self.attention(
408
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
409
+ return self._call_impl(*args, **kwargs)
410
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
411
+ return forward_call(*args, **kwargs)
412
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 427, in forward
413
+ self_outputs = self.self(
414
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1511, in _wrapped_call_impl
415
+ return self._call_impl(*args, **kwargs)
416
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1520, in _call_impl
417
+ return forward_call(*args, **kwargs)
418
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/transformers/models/bert/modeling_bert.py", line 365, in forward
419
+ context_layer = torch.matmul(attention_probs, value_layer)
420
+ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 500.00 MiB. GPU 0 has a total capacity of 14.58 GiB of which 285.56 MiB is free. Including non-PyTorch memory, this process has 14.30 GiB memory in use. Of the allocated memory 13.96 GiB is allocated by PyTorch, and 222.09 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
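The traceback above shows CUDA running out of memory inside predict_for_explainanility while LIME pushes all of its perturbed samples through the BERT forward pass at once. A minimal sketch of one common mitigation, chunking the classifier call so peak GPU memory stays bounded (the function shape, batch_size, and max_length here are illustrative assumptions, not the repo's actual code):

import torch
import torch.nn.functional as F

def predict_in_batches(texts, model, tokenizer, batch_size=16, max_length=512):
    # run LIME's perturbed samples through the model in small chunks
    probas = []
    for i in range(0, len(texts), batch_size):
        enc = tokenizer(
            list(texts[i : i + batch_size]),
            return_tensors="pt",
            padding=True,
            truncation=True,
            max_length=max_length,
        ).to(model.device)
        with torch.no_grad():  # gradients are not needed for explanations
            logits = model(**enc).logits
        probas.append(F.softmax(logits, dim=-1).cpu())
    return torch.cat(probas).numpy()

The allocator hint in the message itself (PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True) can reduce fragmentation, but it does not lower the actual peak usage the way smaller batches do.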
421
+ 2024-03-29 14:48:12.298004: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
422
+ To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
423
+ 2024-03-29 14:48:13.329416: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
424
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
425
+ [nltk_data] Package punkt is already up-to-date!
426
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
427
+ [nltk_data] Package punkt is already up-to-date!
428
+ [nltk_data] Downloading package stopwords to
429
+ [nltk_data] /home/aliasgarov/nltk_data...
430
+ [nltk_data] Package stopwords is already up-to-date!
431
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
432
+ [nltk_data] Package punkt is already up-to-date!
433
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
434
+ [nltk_data] Package punkt is already up-to-date!
435
+ [nltk_data] Downloading package stopwords to
436
+ [nltk_data] /home/aliasgarov/nltk_data...
437
+ [nltk_data] Package stopwords is already up-to-date!
438
+ /usr/bin/python3: No module named spacy
439
+ /home/aliasgarov/copyright_checker/predictors.py:198: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
440
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
441
+ /home/aliasgarov/copyright_checker/predictors.py:198: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
442
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
443
+ /home/aliasgarov/copyright_checker/predictors.py:198: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
444
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
445
+ /home/aliasgarov/copyright_checker/predictors.py:198: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
446
  probas = F.softmax(tensor_logits).detach().cpu().numpy()
447
+ /home/aliasgarov/copyright_checker/predictors.py:198: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
448
  probas = F.softmax(tensor_logits).detach().cpu().numpy()
449
+ /home/aliasgarov/copyright_checker/predictors.py:198: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
450
  probas = F.softmax(tensor_logits).detach().cpu().numpy()
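The deprecation warning repeated above comes from calling F.softmax without a dim argument. Passing the class dimension explicitly silences it and makes the intent unambiguous; for a (batch, num_classes) logits tensor that is presumably the last dimension:

probas = F.softmax(tensor_logits, dim=-1).detach().cpu().numpy()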
451
+ Running on local URL: http://0.0.0.0:80
452
+ Running on public URL: https://008ca76c2bb7f8d8a3.gradio.live
453
+
454
+ This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from Terminal to deploy to Spaces (https://huggingface.co/spaces)
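The local and public URLs above correspond to a shared Gradio launch. A minimal sketch that reproduces this configuration (the demo object is a placeholder, not the app's actual interface):

import gradio as gr

demo = gr.Interface(fn=lambda text: text, inputs="text", outputs="text")  # placeholder app
demo.launch(server_name="0.0.0.0", server_port=80, share=True)  # prints a *.gradio.live link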
455
+ {'Lou Henry Hoover (March 29, 1874 – January 7, 1944) was the first lady of the United States from 1929 to 1933 as the wife of President Herbert Hoover.': -0.007239958671520294, 'She was active in community groups, including the Girl Scouts of the USA, which she led from 1922 to 1925 and from 1935 to 1937.': -0.005940472653387939, 'She was the first woman to earn a geology degree from Stanford.': 0.0016036026130179831, 'In the first twenty years of their marriage, the Hoovers lived in several countries; during World War I, they led efforts to assist war refugees.': 0.001537302361237576, 'Beginning in 1917, they lived in Washington, D.C., as Herbert became a high government official.': -0.007227867941129461, 'In the White House, Lou Hoover dedicated her time as first lady to her volunteer work, though she did not publicize it.': -0.0003420683510965876, 'Her invitation of Jessie De Priest to the White House for tea was controversial in the South.': -0.009836457467468768, "After Herbert's defeat for re-election in 1932, Lou Hoover continued her work, helping provide refugee support with her husband during World War II, and died suddenly of a heart attack in 1944.": 0.005398886066759868} bc
456
+ {'Lou Henry Hoover (March 29, 1874 – January 7, 1944) was the first lady of the United States from 1929 to 1933 as the wife of President Herbert Hoover.': -0.007024535420434749, 'She was active in community groups, including the Girl Scouts of the USA, which she led from 1922 to 1925 and from 1935 to 1937.': -0.005433933632620999, 'She was the first woman to earn a geology degree from Stanford.': 0.0033503657592824465, 'In the first twenty years of their marriage, the Hoovers lived in several countries; during World War I, they led efforts to assist war refugees.': 0.0012667157053936522, 'Beginning in 1917, they lived in Washington, D.C., as Herbert became a high government official.': -0.007406581188202247, 'In the White House, Lou Hoover dedicated her time as first lady to her volunteer work, though she did not publicize it.': -0.0006685564234160865, 'Her invitation of Jessie De Priest to the White House for tea was controversial in the South.': -0.009190228364350466, "After Herbert's defeat for re-election in 1932, Lou Hoover continued her work, helping provide refugee support with her husband during World War II, and died suddenly of a heart attack in 1944.": 0.004699842541408435} bc
457
+ {'Lou Henry Hoover (March 29, 1874 – January 7, 1944) was the first lady of the United States from 1929 to 1933 as the wife of President Herbert Hoover.': -0.641953608456155, 'She was active in community groups, including the Girl Scouts of the USA, which she led from 1922 to 1925 and from 1935 to 1937.': 0.020200923452086798, 'She was the first woman to earn a geology degree from Stanford.': 0.008136189058261252, 'In the first twenty years of their marriage, the Hoovers lived in several countries; during World War I, they led efforts to assist war refugees.': 0.12504063362482074, 'Beginning in 1917, they lived in Washington, D.C., as Herbert became a high government official.': 0.14466029601373961, 'In the White House, Lou Hoover dedicated her time as first lady to her volunteer work, though she did not publicize it.': 0.045496763632525375, 'Her invitation of Jessie De Priest to the White House for tea was controversial in the South.': 0.11435786746768793, "After Herbert's defeat for re-election in 1932, Lou Hoover continued her work, helping provide refugee support with her husband during World War II, and died suddenly of a heart attack in 1944.": 0.3560611292221768} quillbot
458
+ {'Lou Henry Hoover (March 29, 1874 – January 7, 1944) was the first lady of the United States from 1929 to 1933 as the wife of President Herbert Hoover.': -0.049232424744256965, 'She was active in community groups, including the Girl Scouts of the USA, which she led from 1922 to 1925 and from 1935 to 1937.': -0.0808599351295588, 'She was the first woman to earn a geology degree from Stanford.': -0.028306312264799082, 'In the first twenty years of their marriage, the Hoovers lived in several countries; during World War I, they led efforts to assist war refugees.': 0.018576473883078034, 'Beginning in 1917, they lived in Washington, D.C., as Herbert became a high government official.': -0.0658758038308371, 'In the White House, Lou Hoover dedicated her time as first lady to her volunteer work, though she did not publicize it.': 0.00520141594810037, 'Her invitation of Jessie De Priest to the White House for tea was controversial in the South.': -0.06700218547318215, "After Herbert's defeat for re-election in 1932, Lou Hoover continued her work, helping provide refugee support with her husband during World War II, and died suddenly of a heart attack in 1944.": 0.11886694361432464} bc
459
+ {'Lou Henry Hoover (March 29, 1874 – January 7, 1944) was the first lady of the United States from 1929 to 1933 as the wife of President Herbert Hoover.': -0.07048027659860119, 'She was active in community groups, including the Girl Scouts of the USA, which she led from 1922 to 1925 and from 1935 to 1937.': -0.07512228868644406, 'She was the first woman to earn a geology degree from Stanford.': -0.04560898943130033, 'In the first twenty years of their marriage, the Hoovers lived in several countries; during World War I, they led efforts to assist war refugees.': 0.01102573043004705, 'Beginning in 1917, they lived in Washington, D.C., as Herbert became a high government official.': -0.06753051178176432, 'In the White House, Lou Hoover dedicated her time as first lady to her volunteer work, though she did not publicize it.': -0.0016847880819046478, 'Her invitation of Jessie De Priest to the White House for tea was controversial in the South.': -0.06913938144762188, "After Herbert's defeat for re-election in 1932, Lou Hoover continued her work, helping provide refugee support with her husband during World War II, and died suddenly of a heart attack in 1944.": 0.13576338155813136} bc
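The dicts above map each sentence to a signed LIME weight for the "bc" and "quillbot" models. One way such weights can be scaled into highlight intensities, sketched here as an assumption rather than what highlighter.py actually does:

def weights_to_intensity(sentences_weights):
    # normalize signed LIME weights into [0, 1] per sentence
    max_abs = max(abs(w) for w in sentences_weights.values()) or 1.0
    return {s: abs(w) / max_abs for s, w in sentences_weights.items()}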
460
+ 2024-03-29 15:01:50.768841: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
461
  To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
462
+ 2024-03-29 15:01:51.796519: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
463
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
464
  [nltk_data] Package punkt is already up-to-date!
465
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
466
  [nltk_data] Package punkt is already up-to-date!
467
+ [nltk_data] Downloading package stopwords to
468
+ [nltk_data] /home/aliasgarov/nltk_data...
469
  [nltk_data] Package stopwords is already up-to-date!
470
+ The BetterTransformer implementation does not support padding during training, as the fused kernels do not support attention masks. Beware that passing padded batched data during training may result in unexpected outputs. Please refer to https://huggingface.co/docs/optimum/bettertransformer/overview for more details.
471
+ The BetterTransformer implementation does not support padding during training, as the fused kernels do not support attention masks. Beware that passing padded batched data during training may result in unexpected outputs. Please refer to https://huggingface.co/docs/optimum/bettertransformer/overview for more details.
472
+ The BetterTransformer implementation does not support padding during training, as the fused kernels do not support attention masks. Beware that passing padded batched data during training may result in unexpected outputs. Please refer to https://huggingface.co/docs/optimum/bettertransformer/overview for more details.
473
+ The BetterTransformer implementation does not support padding during training, as the fused kernels do not support attention masks. Beware that passing padded batched data during training may result in unexpected outputs. Please refer to https://huggingface.co/docs/optimum/bettertransformer/overview for more details.
474
+ The BetterTransformer implementation does not support padding during training, as the fused kernels do not support attention masks. Beware that passing padded batched data during training may result in unexpected outputs. Please refer to https://huggingface.co/docs/optimum/bettertransformer/overview for more details.
475
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
476
  [nltk_data] Package punkt is already up-to-date!
477
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
478
  [nltk_data] Package punkt is already up-to-date!
479
+ [nltk_data] Downloading package stopwords to
480
+ [nltk_data] /home/aliasgarov/nltk_data...
481
  [nltk_data] Package stopwords is already up-to-date!
482
+ /usr/bin/python3: No module named spacy
483
+ /home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/optimum/bettertransformer/models/encoder_models.py:301: UserWarning: The PyTorch API of nested tensors is in prototype stage and will change in the near future. (Triggered internally at ../aten/src/ATen/NestedTensorImpl.cpp:177.)
484
+ hidden_states = torch._nested_tensor_from_mask(hidden_states, ~attention_mask)
485
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
486
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
487
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
488
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
489
+ Running on local URL: http://0.0.0.0:80
490
+ Running on public URL: https://e095d1a53e42b16b1b.gradio.live
 
 
 
 
 
 
 
 
491
 
492
+ This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from Terminal to deploy to Spaces (https://huggingface.co/spaces)
493
+ {'Lou Henry Hoover (March 29, 1874 – January 7, 1944) was the first lady of the United States from 1929 to 1933 as the wife of President Herbert Hoover.': -0.007083724433403481, 'She was active in community groups, including the Girl Scouts of the USA, which she led from 1922 to 1925 and from 1935 to 1937.': -0.005900632715473411, 'She was the first woman to earn a geology degree from Stanford.': 0.00288471219406703, 'In the first twenty years of their marriage, the Hoovers lived in several countries; during World War I, they led efforts to assist war refugees.': 0.0012162868179568342, 'Beginning in 1917, they lived in Washington, D.C., as Herbert became a high government official.': -0.006270546763081995, 'In the White House, Lou Hoover dedicated her time as first lady to her volunteer work, though she did not publicize it.': -6.844510148763104e-05, 'Her invitation of Jessie De Priest to the White House for tea was controversial in the South.': -0.008883191796269094, "After Herbert's defeat for re-election in 1932, Lou Hoover continued her work, helping provide refugee support with her husband during World War II, and died suddenly of a heart attack in 1944.": 0.005504050009961782} bc
494
+ Original BC scores: AI: 6.408023001114316e-09, HUMAN: 1.0
495
+ Calibration BC scores: AI: 0.0, HUMAN: 1.0
496
+ Models to Test: ['OpenAI GPT', 'Mistral', 'CLAUDE', 'Gemini', 'LLAMA 2']
497
+ Original BC scores: AI: 6.408023001114316e-09, HUMAN: 1.0
498
+ Calibration BC scores: AI: 0.0, HUMAN: 1.0
499
+ Starting MC
500
+ MC Score: {'OpenAI GPT': 0.0, 'Mistral': 0.0, 'CLAUDE': 0.0, 'Gemini': 0.0, 'LLAMA 2': 0.0}
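The "Original" to "Calibration" BC score pairs in this log reflect the isotonic_regression_model.joblib calibrator loaded in predictors.py. A sketch of that mapping, assuming the calibrator is a fitted scikit-learn IsotonicRegression over the raw AI probability:

import joblib
import numpy as np

iso_reg = joblib.load("isotonic_regression_model.joblib")
raw_ai = 6.408023001114316e-09  # "Original BC scores: AI" from the log above
ai = float(iso_reg.predict(np.array([raw_ai]))[0])
print({"AI": ai, "HUMAN": 1.0 - ai})  # should reproduce the calibrated line above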
501
+ {'Lou Henry Hoover (March 29, 1874 – January 7, 1944) was the first lady of the United States from 1929 to 1933 as the wife of President Herbert Hoover.': -0.599086635981887, 'She was active in community groups, including the Girl Scouts of the USA, which she led from 1922 to 1925 and from 1935 to 1937.': 0.08136319631271138, 'She was the first woman to earn a geology degree from Stanford.': 0.02834857510284846, 'In the first twenty years of their marriage, the Hoovers lived in several countries; during World War I, they led efforts to assist war refugees.': 0.061459884832511476, 'Beginning in 1917, they lived in Washington, D.C., as Herbert became a high government official.': 0.16672173091342543, 'In the White House, Lou Hoover dedicated her time as first lady to her volunteer work, though she did not publicize it.': 0.0820923392682848, 'Her invitation of Jessie De Priest to the White House for tea was controversial in the South.': 0.13399838230662856, "After Herbert's defeat for re-election in 1932, Lou Hoover continued her work, helping provide refugee support with her husband during World War II, and died suddenly of a heart attack in 1944.": 0.3821691921261263} quillbot
502
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
503
+ To disable this warning, you can either:
504
+ - Avoid using `tokenizers` before the fork if possible
505
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
506
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
507
+ To disable this warning, you can either:
508
+ - Avoid using `tokenizers` before the fork if possible
509
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
510
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
511
+ To disable this warning, you can either:
512
+ - Avoid using `tokenizers` before the fork if possible
513
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
514
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
515
+ To disable this warning, you can either:
516
+ - Avoid using `tokenizers` before the fork if possible
517
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
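The fork warning above repeats because the Rust tokenizers enable thread parallelism before the process forks. Setting the variable early in the entrypoint, before tokenizers are first used, is the usual fix:

import os
os.environ["TOKENIZERS_PARALLELISM"] = "false"  # must run before transformers/tokenizers work starts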
518
+ WARNING: Invalid HTTP request received.
519
+ WARNING: Invalid HTTP request received.
520
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
521
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
522
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
523
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
524
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
525
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
526
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
527
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
528
+ WARNING: Invalid HTTP request received.
529
+ WARNING: Invalid HTTP request received.
530
+ WARNING: Invalid HTTP request received.
531
+ WARNING: Invalid HTTP request received.
532
+ 2024-03-29 19:06:50.019873: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
533
  To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
534
+ 2024-03-29 19:06:51.074912: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
535
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
536
  [nltk_data] Package punkt is already up-to-date!
537
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
538
  [nltk_data] Package punkt is already up-to-date!
539
+ [nltk_data] Downloading package stopwords to
540
+ [nltk_data] /home/aliasgarov/nltk_data...
541
  [nltk_data] Package stopwords is already up-to-date!
542
+ The BetterTransformer implementation does not support padding during training, as the fused kernels do not support attention masks. Beware that passing padded batched data during training may result in unexpected outputs. Please refer to https://huggingface.co/docs/optimum/bettertransformer/overview for more details.
543
+ The BetterTransformer implementation does not support padding during training, as the fused kernels do not support attention masks. Beware that passing padded batched data during training may result in unexpected outputs. Please refer to https://huggingface.co/docs/optimum/bettertransformer/overview for more details.
544
+ The BetterTransformer implementation does not support padding during training, as the fused kernels do not support attention masks. Beware that passing padded batched data during training may result in unexpected outputs. Please refer to https://huggingface.co/docs/optimum/bettertransformer/overview for more details.
545
+ The BetterTransformer implementation does not support padding during training, as the fused kernels do not support attention masks. Beware that passing padded batched data during training may result in unexpected outputs. Please refer to https://huggingface.co/docs/optimum/bettertransformer/overview for more details.
546
+ The BetterTransformer implementation does not support padding during training, as the fused kernels do not support attention masks. Beware that passing padded batched data during training may result in unexpected outputs. Please refer to https://huggingface.co/docs/optimum/bettertransformer/overview for more details.
547
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
548
  [nltk_data] Package punkt is already up-to-date!
549
+ [nltk_data] Downloading package punkt to /home/aliasgarov/nltk_data...
550
  [nltk_data] Package punkt is already up-to-date!
551
+ [nltk_data] Downloading package stopwords to
552
+ [nltk_data] /home/aliasgarov/nltk_data...
553
  [nltk_data] Package stopwords is already up-to-date!
554
+ /usr/bin/python3: No module named spacy
555
+ /home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/optimum/bettertransformer/models/encoder_models.py:301: UserWarning: The PyTorch API of nested tensors is in prototype stage and will change in the near future. (Triggered internally at ../aten/src/ATen/NestedTensorImpl.cpp:177.)
556
+ hidden_states = torch._nested_tensor_from_mask(hidden_states, ~attention_mask)
557
+ WARNING: Invalid HTTP request received.
558
+ WARNING: Invalid HTTP request received.
559
+ WARNING: Invalid HTTP request received.
560
+ WARNING: Invalid HTTP request received.
561
+ WARNING: Invalid HTTP request received.
562
+ WARNING: Invalid HTTP request received.
563
+ WARNING: Invalid HTTP request received.
564
+ WARNING: Invalid HTTP request received.
565
+ WARNING: Invalid HTTP request received.
566
+ WARNING: Invalid HTTP request received.
567
+ WARNING: Invalid HTTP request received.
568
+ WARNING: Invalid HTTP request received.
569
+ WARNING: Invalid HTTP request received.
570
+ WARNING: Invalid HTTP request received.
571
+ WARNING: Invalid HTTP request received.
572
+ WARNING: Invalid HTTP request received.
573
+ WARNING: Invalid HTTP request received.
574
+ WARNING: Invalid HTTP request received.
575
+ WARNING: Invalid HTTP request received.
576
+ WARNING: Invalid HTTP request received.
577
+ WARNING: Invalid HTTP request received.
578
+ WARNING: Invalid HTTP request received.
579
+ WARNING: Invalid HTTP request received.
580
+ WARNING: Invalid HTTP request received.
581
+ WARNING: Invalid HTTP request received.
582
+ WARNING: Invalid HTTP request received.
583
+ WARNING: Invalid HTTP request received.
584
+ WARNING: Invalid HTTP request received.
585
+ WARNING: Invalid HTTP request received.
586
+ WARNING: Invalid HTTP request received.
587
+ WARNING: Invalid HTTP request received.
588
+ WARNING: Invalid HTTP request received.
589
+ WARNING: Invalid HTTP request received.
590
+ WARNING: Invalid HTTP request received.
591
+ WARNING: Invalid HTTP request received.
592
+ WARNING: Invalid HTTP request received.
593
+ WARNING: Invalid HTTP request received.
594
+ WARNING: Invalid HTTP request received.
595
+ WARNING: Invalid HTTP request received.
596
+ WARNING: Invalid HTTP request received.
597
+ WARNING: Invalid HTTP request received.
598
+ WARNING: Invalid HTTP request received.
599
+ WARNING: Invalid HTTP request received.
600
+ WARNING: Invalid HTTP request received.
601
+ WARNING: Invalid HTTP request received.
602
+ WARNING: Invalid HTTP request received.
603
+ WARNING: Invalid HTTP request received.
604
+ WARNING: Invalid HTTP request received.
605
+ WARNING: Invalid HTTP request received.
606
+ WARNING: Invalid HTTP request received.
607
+ WARNING: Invalid HTTP request received.
608
+ WARNING: Invalid HTTP request received.
609
+ WARNING: Invalid HTTP request received.
610
+ WARNING: Invalid HTTP request received.
611
+ WARNING: Invalid HTTP request received.
612
+ WARNING: Invalid HTTP request received.
613
+ WARNING: Invalid HTTP request received.
614
+ WARNING: Invalid HTTP request received.
615
+ WARNING: Invalid HTTP request received.
616
+ WARNING: Invalid HTTP request received.
617
+ Token indices sequence length is longer than the specified maximum sequence length for this model (881 > 512). Running this sequence through the model will result in indexing errors
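The 881 > 512 warning above means a text was tokenized without truncation for a 512-token model. Truncating at encode time avoids the indexing errors the warning predicts; a self-contained sketch (the checkpoint is illustrative):

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")  # illustrative checkpoint
long_text = "word " * 1000  # any input longer than the 512-token limit
enc = tokenizer(long_text, truncation=True, max_length=512, return_tensors="pt")
assert enc["input_ids"].shape[1] <= 512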
618
+ WARNING: Invalid HTTP request received.
619
+ WARNING: Invalid HTTP request received.
620
+ WARNING: Invalid HTTP request received.
621
+ WARNING: Invalid HTTP request received.
622
+ WARNING: Invalid HTTP request received.
623
+ WARNING: Invalid HTTP request received.
624
+ WARNING: Invalid HTTP request received.
625
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
626
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
627
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
628
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
629
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
630
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
631
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
632
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
633
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
634
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
635
+ WARNING: Invalid HTTP request received.
636
+ WARNING: Invalid HTTP request received.
637
+ WARNING: Invalid HTTP request received.
638
+ WARNING: Invalid HTTP request received.
639
+ WARNING: Invalid HTTP request received.
640
+ WARNING: Invalid HTTP request received.
641
+ WARNING: Invalid HTTP request received.
642
+ WARNING: Invalid HTTP request received.
643
+ WARNING: Invalid HTTP request received.
644
+ WARNING: Invalid HTTP request received.
645
+ WARNING: Invalid HTTP request received.
646
+ WARNING: Invalid HTTP request received.
647
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
648
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
649
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
650
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
651
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
652
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
653
+ Running on local URL: http://0.0.0.0:80
654
+ Running on public URL: https://94e72aa3904122b29c.gradio.live
655
 
656
+ This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from Terminal to deploy to Spaces (https://huggingface.co/spaces)
657
+ Original BC scores: AI: 1.0, HUMAN: 1.7993962986295742e-09
658
+ Calibration BC scores: AI: 0.9994855305466238, HUMAN: 0.0005144694533761873
659
+ Models to Test: ['OpenAI GPT', 'Mistral', 'CLAUDE', 'Gemini', 'LLAMA 2']
660
+ Original BC scores: AI: 1.0, HUMAN: 1.7993962986295742e-09
661
+ Calibration BC scores: AI: 0.9994855305466238, HUMAN: 0.0005144694533761873
662
+ Starting MC
663
+ MC Score: {'OpenAI GPT': 0.9994855300308254, 'Mistral': 5.1601761922609244e-11, 'CLAUDE': 8.48403323344426e-11, 'Gemini': 2.8437433518348655e-10, 'LLAMA 2': 9.498188443606356e-11}
664
+ Original BC scores: AI: 1.0, HUMAN: 1.7997051626750249e-09
665
+ Calibration BC scores: AI: 0.9994855305466238, HUMAN: 0.0005144694533761873
666
+ Original BC scores: AI: 0.6732428669929504, HUMAN: 0.3267570436000824
667
+ Calibration BC scores: AI: 0.4375, HUMAN: 0.5625
668
+ Original BC scores: AI: 0.5024993419647217, HUMAN: 0.49750062823295593
669
+ Calibration BC scores: AI: 0.4375, HUMAN: 0.5625
670
+ Original BC scores: AI: 0.7561723589897156, HUMAN: 0.24382765591144562
671
+ Calibration BC scores: AI: 0.4375, HUMAN: 0.5625
672
+ Original BC scores: AI: 1.0, HUMAN: 1.8036925286679661e-09
673
+ Calibration BC scores: AI: 0.9994855305466238, HUMAN: 0.0005144694533761873
674
+ Original BC scores: AI: 0.7560267448425293, HUMAN: 0.24397319555282593
675
+ Calibration BC scores: AI: 0.4375, HUMAN: 0.5625
676
+ Original BC scores: AI: 0.989621639251709, HUMAN: 0.010378347709774971
677
+ Calibration BC scores: AI: 0.5178571428571429, HUMAN: 0.4821428571428571
678
+ Original BC scores: AI: 1.0, HUMAN: 2.039939994702422e-09
679
+ Calibration BC scores: AI: 0.9994855305466238, HUMAN: 0.0005144694533761873
680
+ Models to Test: ['OpenAI GPT', 'Mistral', 'CLAUDE', 'Gemini', 'LLAMA 2']
681
+ Original BC scores: AI: 1.0, HUMAN: 2.039939994702422e-09
682
+ Calibration BC scores: AI: 0.9994855305466238, HUMAN: 0.0005144694533761873
683
+ Starting MC
684
+ MC Score: {'OpenAI GPT': 0.9994855298515718, 'Mistral': 4.535480345181983e-11, 'CLAUDE': 2.261075985034601e-10, 'Gemini': 3.1878497183516737e-10, 'LLAMA 2': 1.0480460580159845e-10}
685
+ Original BC scores: AI: 1.0, HUMAN: 2.039939994702422e-09
686
+ Calibration BC scores: AI: 0.9994855305466238, HUMAN: 0.0005144694533761873
687
+ Models to Test: ['OpenAI GPT', 'Mistral', 'CLAUDE', 'Gemini', 'LLAMA 2']
688
+ Original BC scores: AI: 1.0, HUMAN: 2.039939994702422e-09
689
+ Calibration BC scores: AI: 0.9994855305466238, HUMAN: 0.0005144694533761873
690
+ Starting MC
691
+ MC Score: {'OpenAI GPT': 0.9994855298515718, 'Mistral': 4.535480345181983e-11, 'CLAUDE': 2.261075985034601e-10, 'Gemini': 3.1878497183516737e-10, 'LLAMA 2': 1.0480460580159845e-10}
692
+ {'Add-on features now encompass AI and Source Identification, leveraging forensic linguistic analysis to ascertain the origin, reliability, and authenticity of content.': -0.15216478135731262, 'These advanced tools can distinguish between human and AI-generated material, pinpointing the specific AI models employed in creation.': -0.05895885252560595, 'This enhancement bolsters the ability to assess content trustworthiness effectively.': 0.03353039204460538} bc
693
+ Original BC scores: AI: 0.998177170753479, HUMAN: 0.0018228011904284358
694
+ Calibration BC scores: AI: 0.6614420062695925, HUMAN: 0.3385579937304075
695
+ Models to Test: ['OpenAI GPT', 'Mistral', 'CLAUDE', 'Gemini', 'LLAMA 2']
696
+ Original BC scores: AI: 0.998177170753479, HUMAN: 0.0018228011904284358
697
+ Calibration BC scores: AI: 0.6614420062695925, HUMAN: 0.3385579937304075
698
+ Starting MC
699
+ MC Score: {'OpenAI GPT': 0.6614420057714218, 'Mistral': 2.7132188074993352e-11, 'CLAUDE': 1.2335682936047867e-10, 'Gemini': 1.7620911369483686e-10, 'LLAMA 2': 1.714725314469418e-10}
700
+ {'AI Identification and Source Identification are add-on capabilities that use forensic linguistic analysis to offer insights into the origin, dependability, and trustworthiness of content as well as whether it was created by humans or artificial intelligence (AI).': -0.006323229799663152, 'They can even identify the precise AI models that were used to create the content.': 0.017586576131630234} bc
701
+ {'AI Identification and Source Identification are add-on capabilities that use forensic linguistic analysis to offer insights into the origin, dependability, and trustworthiness of content as well as whether it was created by humans or artificial intelligence (AI).': -0.43261755952898956, 'They can even identify the precise AI models that were used to create the content.': 0.10732631520197373} quillbot
702
+ {'AI Identification and Source Identification are add-on capabilities that use forensic linguistic analysis to offer insights into the origin, dependability, and trustworthiness of content as well as whether it was created by humans or artificial intelligence (AI).': -0.4322117278076279, 'They can even identify the precise AI models that were used to create the content.': 0.10778412185868685} quillbot
703
+ {'AI Identification and Source Identification are add-on capabilities that use forensic linguistic analysis to offer insights into the origin, dependability, and trustworthiness of content as well as whether it was created by humans or artificial intelligence (AI).': -0.43300422387049115, 'They can even identify the precise AI models that were used to create the content.': 0.10687924275434384} quillbot
704
+ {'Add-on feat ures now encompass AI and Source Identifi cation, leveraging for ensic linguistic analysis to ascertain the origin, reliability, and authen ticity of content.': -0.16172325612226013, 'These advanc ed tools can distinguish between human and AI-generated material, pin pointing the specific AI models employed in creation.': -0.06511130357854991, 'This enhance ment bolsters the ability to assess content trust worthiness effectively.': 0.05332794099561823} bc
705
+ {'Add-on feat ures now encompass AI and Source Identifi cation, leveraging for ensic linguistic analysis to ascertain the origin, reliability, and authen ticity of content.': -0.16378145994849636, 'These advanc ed tools can distinguish between human and AI-generated material, pin pointing the specific AI models employed in creation.': -0.06739973523793355, 'This enhance ment bolsters the ability to assess content trust worthiness effectively.': 0.05366690466131973} bc
706
+ Original BC scores: AI: 0.995067834854126, HUMAN: 0.004932152573019266
707
+ Calibration BC scores: AI: 0.5957446808510638, HUMAN: 0.4042553191489362
708
+ Models to Test: ['OpenAI GPT', 'Mistral', 'CLAUDE', 'Gemini', 'LLAMA 2']
709
+ Original BC scores: AI: 0.995067834854126, HUMAN: 0.004932152573019266
710
+ Calibration BC scores: AI: 0.5957446808510638, HUMAN: 0.4042553191489362
711
+ Starting MC
712
+ MC Score: {'OpenAI GPT': 0.5957441340683721, 'Mistral': 2.0416833660118585e-10, 'CLAUDE': 5.001776967436859e-07, 'Gemini': 2.5271727453711155e-08, 'LLAMA 2': 2.1129099166428725e-08}
713
+ Original BC scores: AI: 0.00025900782202370465, HUMAN: 0.9997410178184509
714
+ Calibration BC scores: AI: 0.04296875, HUMAN: 0.95703125
715
+ Models to Test: ['OpenAI GPT', 'Mistral', 'CLAUDE', 'Gemini', 'LLAMA 2']
716
+ Original BC scores: AI: 0.00025900782202370465, HUMAN: 0.9997410178184509
717
+ Calibration BC scores: AI: 0.04296875, HUMAN: 0.95703125
718
+ Starting MC
719
+ MC Score: {'OpenAI GPT': 0.025428532807609403, 'Mistral': 1.6376084024317497e-09, 'CLAUDE': 1.6831211047289287e-06, 'Gemini': 1.8230926181583228e-06, 'LLAMA 2': 0.017536709341059307}
720
+ WARNING: Invalid HTTP request received.
721
+ WARNING: Invalid HTTP request received.
722
+ WARNING: Invalid HTTP request received.
723
+ WARNING: Invalid HTTP request received.
724
+ WARNING: Invalid HTTP request received.
725
+ WARNING: Invalid HTTP request received.
726
+ WARNING: Invalid HTTP request received.
727
+ WARNING: Invalid HTTP request received.
728
+ WARNING: Invalid HTTP request received.
729
+ WARNING: Invalid HTTP request received.
730
+ WARNING: Invalid HTTP request received.
731
+ WARNING: Invalid HTTP request received.
732
+ WARNING: Invalid HTTP request received.
733
+ WARNING: Invalid HTTP request received.
734
+ WARNING: Invalid HTTP request received.
735
+ WARNING: Invalid HTTP request received.
736
+ WARNING: Invalid HTTP request received.
737
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
738
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
739
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
740
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
741
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
742
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
743
+ /home/aliasgarov/copyright_checker/predictors.py:205: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
744
+ probas = F.softmax(tensor_logits).detach().cpu().numpy()
745
+ WARNING: Invalid HTTP request received.
746
+ WARNING: Invalid HTTP request received.
747
+ WARNING: Invalid HTTP request received.
748
+ WARNING: Invalid HTTP request received.
749
+ WARNING: Invalid HTTP request received.
750
+ WARNING: Invalid HTTP request received.
751
+ WARNING: Invalid HTTP request received.
752
+ WARNING: Invalid HTTP request received.
753
+ WARNING: Invalid HTTP request received.
754
+ WARNING: Invalid HTTP request received.
755
+ WARNING: Invalid HTTP request received.
756
+ WARNING: Invalid HTTP request received.
757
+ WARNING: Invalid HTTP request received.
758
+ WARNING: Invalid HTTP request received.
759
+ WARNING: Invalid HTTP request received.
760
+ WARNING: Invalid HTTP request received.
761
+ WARNING: Invalid HTTP request received.
762
+ WARNING: Invalid HTTP request received.
763
+ WARNING: Invalid HTTP request received.
764
+ WARNING: Invalid HTTP request received.
765
+ WARNING: Invalid HTTP request received.
766
+ ERROR: Exception in ASGI application
767
+ Traceback (most recent call last):
768
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/uvicorn/protocols/http/h11_impl.py", line 407, in run_asgi
769
+ result = await app( # type: ignore[func-returns-value]
770
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/uvicorn/middleware/proxy_headers.py", line 69, in __call__
771
+ return await self.app(scope, receive, send)
772
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/fastapi/applications.py", line 1054, in __call__
773
+ await super().__call__(scope, receive, send)
774
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/starlette/applications.py", line 123, in __call__
775
+ await self.middleware_stack(scope, receive, send)
776
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 186, in __call__
777
+ raise exc
778
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 164, in __call__
779
+ await self.app(scope, receive, _send)
780
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/gradio/route_utils.py", line 680, in __call__
781
+ await self.app(scope, receive, send)
782
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/starlette/middleware/exceptions.py", line 62, in __call__
783
+ await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
784
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 78, in wrapped_app
785
+ await response(scope, receive, sender)
786
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/starlette/responses.py", line 151, in __call__
787
+ await send(
788
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/starlette/_exception_handler.py", line 50, in sender
789
+ await send(message)
790
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/starlette/middleware/errors.py", line 161, in _send
791
+ await send(message)
792
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/uvicorn/protocols/http/h11_impl.py", line 489, in send
793
+ output = self.conn.send(event=response)
794
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/h11/_connection.py", line 512, in send
795
+ data_list = self.send_with_data_passthrough(event)
796
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/h11/_connection.py", line 537, in send_with_data_passthrough
797
+ self._process_event(self.our_role, event)
798
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/h11/_connection.py", line 272, in _process_event
799
+ self._cstate.process_event(role, type(event), server_switch_event)
800
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/h11/_state.py", line 293, in process_event
801
+ self._fire_event_triggered_transitions(role, _event_type)
802
+ File "/home/aliasgarov/copyright_checker/venv/lib/python3.10/site-packages/h11/_state.py", line 311, in _fire_event_triggered_transitions
803
+ raise LocalProtocolError(
804
+ h11._util.LocalProtocolError: can't handle event type Response when role=SERVER and state=MUST_CLOSE
805
+ WARNING: Invalid HTTP request received.
806
+ WARNING: Invalid HTTP request received.
807
+ WARNING: Invalid HTTP request received.
808
+ WARNING: Invalid HTTP request received.
809
+ WARNING: Invalid HTTP request received.
810
+ WARNING: Invalid HTTP request received.
811
+ WARNING: Invalid HTTP request received.
predictors.py CHANGED
@@ -20,12 +20,14 @@ import yaml
20
  import os
21
  from utils import *
22
  import joblib
 
23
 
24
  with open("config.yaml", "r") as file:
25
  params = yaml.safe_load(file)
26
  nltk.download("punkt")
27
  nltk.download("stopwords")
28
- device = "cuda" if torch.cuda.is_available() else "cpu"
 
29
  text_bc_model_path = params["TEXT_BC_MODEL_PATH"]
30
  text_mc_model_path = params["TEXT_MC_MODEL_PATH"]
31
  text_quillbot_model_path = params["TEXT_QUILLBOT_MODEL_PATH"]
@@ -60,12 +62,18 @@ mini_bc_model_name = "polygraf-ai/bc-model-bert-mini"
60
  bc_tokenizer_mini = AutoTokenizer.from_pretrained(mini_bc_model_name)
61
  bc_model_mini = AutoModelForSequenceClassification.from_pretrained(
62
  mini_bc_model_name
63
- ).to(device)
64
  mini_humanizer_model_name = "polygraf-ai/quillbot-detector-bert-mini-9K"
65
  humanizer_tokenizer_mini = AutoTokenizer.from_pretrained(mini_humanizer_model_name)
66
  humanizer_model_mini = AutoModelForSequenceClassification.from_pretrained(
67
  mini_humanizer_model_name
68
- ).to(device)
 
 
 
 
 
 
69
 
70
  # model score calibration
71
  iso_reg = joblib.load("isotonic_regression_model.joblib")
@@ -191,7 +199,7 @@ def predict_for_explainanility(text, model_type=None):
191
  padding="max_length",
192
  truncation=True,
193
  max_length=max_length,
194
- ).to(device)
195
  outputs = model(**tokenized_text)
196
  tensor_logits = outputs[0]
197
  probas = F.softmax(tensor_logits).detach().cpu().numpy()
 
20
  import os
21
  from utils import *
22
  import joblib
23
+ from optimum.bettertransformer import BetterTransformer
24
 
25
  with open("config.yaml", "r") as file:
26
  params = yaml.safe_load(file)
27
  nltk.download("punkt")
28
  nltk.download("stopwords")
29
+ device_needed = "cuda" if torch.cuda.is_available() else "cpu"
30
+ device = 'cpu'
31
  text_bc_model_path = params["TEXT_BC_MODEL_PATH"]
32
  text_mc_model_path = params["TEXT_MC_MODEL_PATH"]
33
  text_quillbot_model_path = params["TEXT_QUILLBOT_MODEL_PATH"]
 
62
  bc_tokenizer_mini = AutoTokenizer.from_pretrained(mini_bc_model_name)
63
  bc_model_mini = AutoModelForSequenceClassification.from_pretrained(
64
  mini_bc_model_name
65
+ ).to(device_needed)
66
  mini_humanizer_model_name = "polygraf-ai/quillbot-detector-bert-mini-9K"
67
  humanizer_tokenizer_mini = AutoTokenizer.from_pretrained(mini_humanizer_model_name)
68
  humanizer_model_mini = AutoModelForSequenceClassification.from_pretrained(
69
  mini_humanizer_model_name
70
+ ).to(device_needed)
71
+
72
+ bc_model_mini = BetterTransformer.transform(bc_model_mini)
73
+ humanizer_model_mini = BetterTransformer.transform(humanizer_model_mini)
74
+ text_bc_model = BetterTransformer.transform(text_bc_model)
75
+ text_mc_model = BetterTransformer.transform(text_mc_model)
76
+ quillbot_model = BetterTransformer.transform(quillbot_model)
77
 
78
  # model score calibration
79
  iso_reg = joblib.load("isotonic_regression_model.joblib")
 
199
  padding="max_length",
200
  truncation=True,
201
  max_length=max_length,
202
+ ).to(device_needed)
203
  outputs = model(**tokenized_text)
204
  tensor_logits = outputs[0]
205
  probas = F.softmax(tensor_logits).detach().cpu().numpy()
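The predictors.py change above routes each loaded model through BetterTransformer for fused-kernel inference, which is also what triggers the earlier padding warnings (they concern training, not this inference-only use). A self-contained sketch of the same pattern; the checkpoint name is illustrative, not one of the repo's polygraf-ai models:

import torch
from optimum.bettertransformer import BetterTransformer
from transformers import AutoModelForSequenceClassification, AutoTokenizer

name = "bert-base-uncased"  # illustrative checkpoint
tokenizer = AutoTokenizer.from_pretrained(name)
model = AutoModelForSequenceClassification.from_pretrained(name)
model = BetterTransformer.transform(model)  # swap encoder layers for fused kernels
model.eval()  # inference only, so the training padding caveat does not apply

enc = tokenizer(["sample text"], return_tensors="pt", padding=True)
with torch.no_grad():
    probas = torch.softmax(model(**enc).logits, dim=-1)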
requirements.txt CHANGED
@@ -25,4 +25,5 @@ sentence-transformers
25
  Unidecode
26
  python-dotenv
27
  lime
28
- joblib
 
 
25
  Unidecode
26
  python-dotenv
27
  lime
28
+ joblib
29
+ optimum
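The new optimum entry backs the BetterTransformer import added in predictors.py. Both lines are unpinned; pinning them would make rebuilds reproducible, for example (versions shown are examples only, match whatever was actually tested):

joblib==1.3.2
optimum==1.17.1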