Colab file to test model
#1
by
haoruili
- opened
- 40_1_79_7_svm.ipynb +288 -0
40_1_79_7_svm.ipynb
ADDED
@@ -0,0 +1,288 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"nbformat": 4,
|
3 |
+
"nbformat_minor": 0,
|
4 |
+
"metadata": {
|
5 |
+
"colab": {
|
6 |
+
"provenance": []
|
7 |
+
},
|
8 |
+
"kernelspec": {
|
9 |
+
"name": "python3",
|
10 |
+
"display_name": "Python 3"
|
11 |
+
},
|
12 |
+
"language_info": {
|
13 |
+
"name": "python"
|
14 |
+
}
|
15 |
+
},
|
16 |
+
"cells": [
|
17 |
+
{
|
18 |
+
"cell_type": "markdown",
|
19 |
+
"source": [
|
20 |
+
"# Group 40 - Round 1"
|
21 |
+
],
|
22 |
+
"metadata": {
|
23 |
+
"id": "mbUcS2nQmNlP"
|
24 |
+
}
|
25 |
+
},
|
26 |
+
{
|
27 |
+
"cell_type": "markdown",
|
28 |
+
"source": [
|
29 |
+
"# To test this group, we need to download their files and the metadata.csv as the test dataset, and then upload them directly into the \"Files\" panel. We do not need a token or the further pipeline code used in our provided Colab file"
|
30 |
+
],
|
31 |
+
"metadata": {
|
32 |
+
"id": "hrRMVJ5Zmrhx"
|
33 |
+
}
|
34 |
+
},
|
35 |
+
{
|
36 |
+
"cell_type": "code",
|
37 |
+
"execution_count": null,
|
38 |
+
"metadata": {
|
39 |
+
"colab": {
|
40 |
+
"base_uri": "https://localhost:8080/"
|
41 |
+
},
|
42 |
+
"id": "z5NrnuaekzZP",
|
43 |
+
"outputId": "e2270757-3f09-489b-af54-2047164d06b0"
|
44 |
+
},
|
45 |
+
"outputs": [
|
46 |
+
{
|
47 |
+
"output_type": "stream",
|
48 |
+
"name": "stdout",
|
49 |
+
"text": [
|
50 |
+
"Requirement already satisfied: nltk in /usr/local/lib/python3.10/dist-packages (3.9.1)\n",
|
51 |
+
"Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.10/dist-packages (4.12.3)\n",
|
52 |
+
"Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (1.5.2)\n",
|
53 |
+
"Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (2.2.2)\n",
|
54 |
+
"Collecting datasets\n",
|
55 |
+
" Downloading datasets-3.2.0-py3-none-any.whl.metadata (20 kB)\n",
|
56 |
+
"Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (2024.10.0)\n",
|
57 |
+
"Requirement already satisfied: huggingface_hub in /usr/local/lib/python3.10/dist-packages (0.26.5)\n",
|
58 |
+
"Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk) (8.1.7)\n",
|
59 |
+
"Requirement already satisfied: joblib in /usr/local/lib/python3.10/dist-packages (from nltk) (1.4.2)\n",
|
60 |
+
"Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk) (2024.9.11)\n",
|
61 |
+
"Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from nltk) (4.66.6)\n",
|
62 |
+
"Requirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.10/dist-packages (from beautifulsoup4) (2.6)\n",
|
63 |
+
"Requirement already satisfied: numpy>=1.19.5 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.26.4)\n",
|
64 |
+
"Requirement already satisfied: scipy>=1.6.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.13.1)\n",
|
65 |
+
"Requirement already satisfied: threadpoolctl>=3.1.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (3.5.0)\n",
|
66 |
+
"Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas) (2.8.2)\n",
|
67 |
+
"Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.2)\n",
|
68 |
+
"Requirement already satisfied: tzdata>=2022.7 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.2)\n",
|
69 |
+
"Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from datasets) (3.16.1)\n",
|
70 |
+
"Requirement already satisfied: pyarrow>=15.0.0 in /usr/local/lib/python3.10/dist-packages (from datasets) (17.0.0)\n",
|
71 |
+
"Collecting dill<0.3.9,>=0.3.0 (from datasets)\n",
|
72 |
+
" Downloading dill-0.3.8-py3-none-any.whl.metadata (10 kB)\n",
|
73 |
+
"Requirement already satisfied: requests>=2.32.2 in /usr/local/lib/python3.10/dist-packages (from datasets) (2.32.3)\n",
|
74 |
+
"Collecting xxhash (from datasets)\n",
|
75 |
+
" Downloading xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (12 kB)\n",
|
76 |
+
"Collecting multiprocess<0.70.17 (from datasets)\n",
|
77 |
+
" Downloading multiprocess-0.70.16-py310-none-any.whl.metadata (7.2 kB)\n",
|
78 |
+
"Collecting fsspec\n",
|
79 |
+
" Downloading fsspec-2024.9.0-py3-none-any.whl.metadata (11 kB)\n",
|
80 |
+
"Requirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from datasets) (3.11.10)\n",
|
81 |
+
"Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from datasets) (24.2)\n",
|
82 |
+
"Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.10/dist-packages (from datasets) (6.0.2)\n",
|
83 |
+
"Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.10/dist-packages (from huggingface_hub) (4.12.2)\n",
|
84 |
+
"Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (2.4.4)\n",
|
85 |
+
"Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (1.3.1)\n",
|
86 |
+
"Requirement already satisfied: async-timeout<6.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (4.0.3)\n",
|
87 |
+
"Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (24.2.0)\n",
|
88 |
+
"Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (1.5.0)\n",
|
89 |
+
"Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (6.1.0)\n",
|
90 |
+
"Requirement already satisfied: propcache>=0.2.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (0.2.1)\n",
|
91 |
+
"Requirement already satisfied: yarl<2.0,>=1.17.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->datasets) (1.18.3)\n",
|
92 |
+
"Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.2->pandas) (1.17.0)\n",
|
93 |
+
"Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.2->datasets) (3.4.0)\n",
|
94 |
+
"Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.2->datasets) (3.10)\n",
|
95 |
+
"Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.2->datasets) (2.2.3)\n",
|
96 |
+
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.32.2->datasets) (2024.8.30)\n",
|
97 |
+
"Downloading datasets-3.2.0-py3-none-any.whl (480 kB)\n",
|
98 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m480.6/480.6 kB\u001b[0m \u001b[31m10.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
99 |
+
"\u001b[?25hDownloading fsspec-2024.9.0-py3-none-any.whl (179 kB)\n",
|
100 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m179.3/179.3 kB\u001b[0m \u001b[31m10.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
101 |
+
"\u001b[?25hDownloading dill-0.3.8-py3-none-any.whl (116 kB)\n",
|
102 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m116.3/116.3 kB\u001b[0m \u001b[31m8.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
103 |
+
"\u001b[?25hDownloading multiprocess-0.70.16-py310-none-any.whl (134 kB)\n",
|
104 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.8/134.8 kB\u001b[0m \u001b[31m10.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
105 |
+
"\u001b[?25hDownloading xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (194 kB)\n",
|
106 |
+
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m194.1/194.1 kB\u001b[0m \u001b[31m14.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
107 |
+
"\u001b[?25hInstalling collected packages: xxhash, fsspec, dill, multiprocess, datasets\n",
|
108 |
+
" Attempting uninstall: fsspec\n",
|
109 |
+
" Found existing installation: fsspec 2024.10.0\n",
|
110 |
+
" Uninstalling fsspec-2024.10.0:\n",
|
111 |
+
" Successfully uninstalled fsspec-2024.10.0\n",
|
112 |
+
"\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
|
113 |
+
"gcsfs 2024.10.0 requires fsspec==2024.10.0, but you have fsspec 2024.9.0 which is incompatible.\u001b[0m\u001b[31m\n",
|
114 |
+
"\u001b[0mSuccessfully installed datasets-3.2.0 dill-0.3.8 fsspec-2024.9.0 multiprocess-0.70.16 xxhash-3.5.0\n"
|
115 |
+
]
|
116 |
+
}
|
117 |
+
],
|
118 |
+
"source": [
|
119 |
+
"pip install nltk beautifulsoup4 scikit-learn pandas datasets fsspec huggingface_hub"
|
120 |
+
]
|
121 |
+
},
|
122 |
+
{
|
123 |
+
"cell_type": "code",
|
124 |
+
"source": [
|
125 |
+
"import nltk\n",
|
126 |
+
"nltk.download('stopwords')\n",
|
127 |
+
"nltk.download('wordnet')\n",
|
128 |
+
"nltk.download('omw-1.4')"
|
129 |
+
],
|
130 |
+
"metadata": {
|
131 |
+
"colab": {
|
132 |
+
"base_uri": "https://localhost:8080/"
|
133 |
+
},
|
134 |
+
"id": "CGumtNXElEhY",
|
135 |
+
"outputId": "76e996f5-150c-4fc7-830f-afdf533e61e0"
|
136 |
+
},
|
137 |
+
"execution_count": null,
|
138 |
+
"outputs": [
|
139 |
+
{
|
140 |
+
"output_type": "stream",
|
141 |
+
"name": "stderr",
|
142 |
+
"text": [
|
143 |
+
"[nltk_data] Downloading package stopwords to /root/nltk_data...\n",
|
144 |
+
"[nltk_data] Unzipping corpora/stopwords.zip.\n",
|
145 |
+
"[nltk_data] Downloading package wordnet to /root/nltk_data...\n",
|
146 |
+
"[nltk_data] Downloading package omw-1.4 to /root/nltk_data...\n"
|
147 |
+
]
|
148 |
+
},
|
149 |
+
{
|
150 |
+
"output_type": "execute_result",
|
151 |
+
"data": {
|
152 |
+
"text/plain": [
|
153 |
+
"True"
|
154 |
+
]
|
155 |
+
},
|
156 |
+
"metadata": {},
|
157 |
+
"execution_count": 2
|
158 |
+
}
|
159 |
+
]
|
160 |
+
},
|
161 |
+
{
|
162 |
+
"cell_type": "code",
|
163 |
+
"source": [
|
164 |
+
"from data_cleaning import clean\n",
|
165 |
+
"import pandas as pd\n",
|
166 |
+
"import nltk\n",
|
167 |
+
"nltk.download('stopwords')"
|
168 |
+
],
|
169 |
+
"metadata": {
|
170 |
+
"colab": {
|
171 |
+
"base_uri": "https://localhost:8080/"
|
172 |
+
},
|
173 |
+
"id": "h5etdnrMlH-2",
|
174 |
+
"outputId": "13ae15c0-29f3-41fd-f992-98f2b97731fb"
|
175 |
+
},
|
176 |
+
"execution_count": null,
|
177 |
+
"outputs": [
|
178 |
+
{
|
179 |
+
"output_type": "stream",
|
180 |
+
"name": "stderr",
|
181 |
+
"text": [
|
182 |
+
"[nltk_data] Downloading package stopwords to /root/nltk_data...\n",
|
183 |
+
"[nltk_data] Package stopwords is already up-to-date!\n"
|
184 |
+
]
|
185 |
+
},
|
186 |
+
{
|
187 |
+
"output_type": "execute_result",
|
188 |
+
"data": {
|
189 |
+
"text/plain": [
|
190 |
+
"True"
|
191 |
+
]
|
192 |
+
},
|
193 |
+
"metadata": {},
|
194 |
+
"execution_count": 3
|
195 |
+
}
|
196 |
+
]
|
197 |
+
},
|
198 |
+
{
|
199 |
+
"cell_type": "code",
|
200 |
+
"source": [
|
201 |
+
"df = pd.read_csv(\"/content/metadata.csv\") # Haorui downloaded the test dataset from their repo and uploaded it into the \"Files\" panel, replacing the old path\n",
|
202 |
+
"\n",
|
203 |
+
"cleaned_df = clean(df)"
|
204 |
+
],
|
205 |
+
"metadata": {
|
206 |
+
"colab": {
|
207 |
+
"base_uri": "https://localhost:8080/"
|
208 |
+
},
|
209 |
+
"id": "2N4asei9lLDG",
|
210 |
+
"outputId": "348b7a48-1320-4513-9985-bf8f7be9f820"
|
211 |
+
},
|
212 |
+
"execution_count": null,
|
213 |
+
"outputs": [
|
214 |
+
{
|
215 |
+
"output_type": "stream",
|
216 |
+
"name": "stderr",
|
217 |
+
"text": [
|
218 |
+
"/content/data_cleaning.py:11: MarkupResemblesLocatorWarning: The input looks more like a filename than markup. You may want to open this file and pass the filehandle into Beautiful Soup.\n",
|
219 |
+
" headline = BeautifulSoup(headline, 'html.parser').get_text()\n"
|
220 |
+
]
|
221 |
+
}
|
222 |
+
]
|
223 |
+
},
|
224 |
+
{
|
225 |
+
"cell_type": "code",
|
226 |
+
"source": [
|
227 |
+
"from tfidf import tfidf\n",
|
228 |
+
"\n",
|
229 |
+
"\n",
|
230 |
+
"X_new_tfidf = tfidf.transform(cleaned_df['title'])"
|
231 |
+
],
|
232 |
+
"metadata": {
|
233 |
+
"colab": {
|
234 |
+
"base_uri": "https://localhost:8080/"
|
235 |
+
},
|
236 |
+
"id": "R6oxMUe1lZ6H",
|
237 |
+
"outputId": "23da442e-6a27-448e-8032-cb274c09729d"
|
238 |
+
},
|
239 |
+
"execution_count": null,
|
240 |
+
"outputs": [
|
241 |
+
{
|
242 |
+
"output_type": "stream",
|
243 |
+
"name": "stderr",
|
244 |
+
"text": [
|
245 |
+
"/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_auth.py:94: UserWarning: \n",
|
246 |
+
"The secret `HF_TOKEN` does not exist in your Colab secrets.\n",
|
247 |
+
"To authenticate with the Hugging Face Hub, create a token in your settings tab (https://huggingface.co/settings/tokens), set it as secret in your Google Colab and restart your session.\n",
|
248 |
+
"You will be able to reuse this secret in all of your notebooks.\n",
|
249 |
+
"Please note that authentication is recommended but still optional to access public models or datasets.\n",
|
250 |
+
" warnings.warn(\n"
|
251 |
+
]
|
252 |
+
}
|
253 |
+
]
|
254 |
+
},
|
255 |
+
{
|
256 |
+
"cell_type": "code",
|
257 |
+
"source": [
|
258 |
+
"from svm import svm_model"
|
259 |
+
],
|
260 |
+
"metadata": {
|
261 |
+
"colab": {
|
262 |
+
"base_uri": "https://localhost:8080/"
|
263 |
+
},
|
264 |
+
"id": "8r_lzOdzldUm",
|
265 |
+
"outputId": "4466ff74-5dcc-4472-e105-15e3d7804d2a"
|
266 |
+
},
|
267 |
+
"execution_count": null,
|
268 |
+
"outputs": [
|
269 |
+
{
|
270 |
+
"output_type": "stream",
|
271 |
+
"name": "stdout",
|
272 |
+
"text": [
|
273 |
+
"Accuracy: 0.7970\n",
|
274 |
+
" precision recall f1-score support\n",
|
275 |
+
"\n",
|
276 |
+
" 0 0.81 0.75 0.78 293\n",
|
277 |
+
" 1 0.78 0.84 0.81 313\n",
|
278 |
+
"\n",
|
279 |
+
" accuracy 0.80 606\n",
|
280 |
+
" macro avg 0.80 0.80 0.80 606\n",
|
281 |
+
"weighted avg 0.80 0.80 0.80 606\n",
|
282 |
+
"\n"
|
283 |
+
]
|
284 |
+
}
|
285 |
+
]
|
286 |
+
}
|
287 |
+
]
|
288 |
+
}
|