Upload 3 files
model_code/dataset_loader.py
ADDED
@@ -0,0 +1,58 @@
import torch
from torch.utils.data import Dataset

import json
import os
import collections

class IntentDataset(Dataset):
    def __init__(self, loc, tokenizer, mode, toy=False, max_length=180):
        '''
        You can fine-tune a model with your own data!! Feel free to create (or collect!) your own utterances
        and give it a shot!

        loc: relative directory where the data lies
        tokenizer: huggingface tokenizer to preprocess utterances
        mode: one of train, val, test (should match the respective *.json files)
        toy: load a very small amount of data (for debugging purposes)
        max_length: max length of the tokenized input
        '''
        self.tokenizer = tokenizer
        self.mode = mode
        self.max_length = max_length

        with open(os.path.join(loc, 'all_intents.json'), 'r') as all_intents_json:
            self.all_intents = json.load(all_intents_json)  # contains the written-out names of the intents. also implicitly
            # defines how many intents your chatbot's neural intent detection will support

        with open(os.path.join(loc, mode + '.json'), 'r') as json_data:
            self.all_data = json.load(json_data)

        if toy:
            self.all_data = self.all_data[:10]

        print(f"Loaded Intent detection dataset. {len(self.all_data)} examples. ({mode}). {'Toy example' if toy else ''}")

    def __len__(self):  # torch Datasets need a __len__ method and a __getitem__ method, with len as the total amount of examples...
        return len(self.all_data)

    def __getitem__(self, index):  # ... and __getitem__ as a way to get an example given an index >= 0 and < __len__
        data_item = self.all_data[index]

        if len(data_item) == 3:
            tokenized_input = self.tokenizer(data_item[0], data_item[1], return_tensors='pt', padding='max_length', truncation=True, max_length=self.max_length)
        else:
            tokenized_input = self.tokenizer(data_item[0], return_tensors='pt', padding='max_length', truncation=True, max_length=self.max_length)

        output_item = {
            'input_ids': tokenized_input['input_ids'].squeeze(0),
            'attention_mask': tokenized_input['attention_mask'].squeeze(0),
            'label': torch.tensor(self.all_intents.index(data_item[-1]))
        }
        if 'token_type_ids' in tokenized_input:
            output_item['token_type_ids'] = tokenized_input['token_type_ids'].squeeze(0)
        return output_item
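For anyone who wants to fine-tune on their own utterances, the loader above implies a simple data layout: all_intents.json is a flat JSON list of intent names, and train.json / val.json / test.json are JSON lists whose entries end with the intent name, preceded by either one text segment or two (the two-segment form is tokenized as a sentence pair). The sketch below is only an illustration inferred from the code in this upload; the utterances are made up, and the two intent names are taken from the notebook outputs further down.

import json
import os

# Hypothetical toy data, only to illustrate the shapes IntentDataset expects.
all_intents = ["NextStepIntent", "IngredientsConfirmationIntent"]

train = [
    # [utterance, intent]
    ["please go to the next step", "NextStepIntent"],
    # [first segment, second segment, intent] -- fed to the tokenizer as a sentence pair,
    # e.g. the preceding agent turn followed by the user's reply (an assumption, not spelled out in the code)
    ["Here are the ingredients for the recipe ...", "yes, I have all of those", "IngredientsConfirmationIntent"],
]

os.makedirs("twiz-data", exist_ok=True)
with open(os.path.join("twiz-data", "all_intents.json"), "w") as f:
    json.dump(all_intents, f)
with open(os.path.join("twiz-data", "train.json"), "w") as f:
    json.dump(train, f)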
model_code/intent-detection-example.ipynb
ADDED
@@ -0,0 +1,94 @@
# In[19]:
import json
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# In[4]:
with open("twiz-data/all_intents.json", 'r') as json_in:
    data = json.load(json_in)

# In[7]:
id_to_intent, intent_to_id = dict(), dict()
for i, intent in enumerate(data):
    id_to_intent[i] = intent
    intent_to_id[intent] = i

# In[13]:
model = AutoModelForSequenceClassification.from_pretrained("roberta-based/checkpoint-925", num_labels=len(data), id2label=id_to_intent, label2id=intent_to_id)
tokenizer = AutoTokenizer.from_pretrained("tokenizer")

# In[21]:
model_in = tokenizer("I really really wanna go to the next step", return_tensors='pt')
with torch.no_grad():
    logits = model(**model_in).logits
    predicted_class_id = logits.argmax().item()
    print(model.config.id2label[predicted_class_id])
# [stdout] NextStepIntent

# Notebook metadata: kernel "ws2024", Python 3.10.14
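The inference steps in this notebook can be wrapped into a small helper for use elsewhere in the chatbot. Below is a minimal sketch that reuses the paths assumed above (roberta-based/checkpoint-925, the saved tokenizer/ directory, and twiz-data/all_intents.json); predict_intent is a hypothetical name, not part of this upload.

import json
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

with open("twiz-data/all_intents.json") as f:
    all_intents = json.load(f)

# Same checkpoint/tokenizer paths as in the cells above.
model = AutoModelForSequenceClassification.from_pretrained(
    "roberta-based/checkpoint-925",
    num_labels=len(all_intents),
    id2label=dict(enumerate(all_intents)),
    label2id={intent: i for i, intent in enumerate(all_intents)},
).eval()
tokenizer = AutoTokenizer.from_pretrained("tokenizer")

def predict_intent(utterance: str) -> str:
    """Return the predicted intent name for a single user utterance."""
    model_in = tokenizer(utterance, return_tensors="pt", truncation=True, max_length=180)
    with torch.no_grad():
        logits = model(**model_in).logits
    return model.config.id2label[logits.argmax().item()]

print(predict_intent("I really really wanna go to the next step"))  # printed "NextStepIntent" in the run above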
model_code/intent-detection-training.ipynb
ADDED
@@ -0,0 +1,392 @@
# In[1]:
import os
import numpy as np

# import transformers
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    Trainer,
    TrainingArguments,
)
from datasets import load_metric

from dataset_loader import IntentDataset
# [stderr] TqdmWarning: IProgress not found. Please update jupyter and ipywidgets.

# In[2]:
# transformers.logging.set_verbosity_info()
# transformers.logging.set_verbosity_error()
# We set the verbosity to error to avoid the annoying huggingface warnings
# when loading models before training them. If you're having trouble getting things to work,
# maybe comment that line out (setting the verbosity to info may also lead to interesting outputs!)
# os.environ['TOKENIZERS_PARALLELISM'] = "false"  # trainer (?) was complaining about parallel tokenization
# os.environ["WANDB_DISABLED"] = "true"  # trainer was complaining about wandb

# In[3]:
model_checkpoint_name = 'roberta-base'  # try 'bert-base-uncased', 'bert-base-cased', 'bert-large-uncased'
dataset_name = 'twiz-data'  # rename to your dataset dir
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint_name)  # loads a tokenizer
tokenizer.save_pretrained("tokenizer")
# [stderr] FutureWarning (huggingface_hub): `resume_download` is deprecated and will be removed in version 1.0.0.

# In[4]:
train_dataset = IntentDataset(dataset_name, tokenizer, 'train')  # check dataset_loader.py for the dataset loading code
val_dataset = IntentDataset(dataset_name, tokenizer, 'val')

model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint_name, num_labels=len(train_dataset.all_intents))  # loads the pre-trained model weights
# [stdout] Loaded Intent detection dataset. 5916 examples. (train).
# [stdout] Loaded Intent detection dataset. 819 examples. (val).
# [stderr] Some weights of RobertaForSequenceClassification were not initialized from the model checkpoint at roberta-base
#          and are newly initialized: ['classifier.dense.bias', 'classifier.dense.weight', 'classifier.out_proj.bias', 'classifier.out_proj.weight']
#          You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.

# In[10]:
inspect_index = 0
print('All data keys:', train_dataset[inspect_index].keys())
print(train_dataset[inspect_index]['input_ids'], train_dataset[inspect_index]['input_ids'].shape)
# you can check which intent a label corresponds to via the all_intents attribute, as such:
train_dataset[inspect_index]['label'], train_dataset.all_intents[train_dataset[inspect_index]['label']]
# [stdout] All data keys: dict_keys(['input_ids', 'attention_mask', 'label'])
# [stdout] tensor([   0, 6715,   28, 7316,   77,  634, ...,    1,    1,    1]) torch.Size([180])  (input ids padded with 1s to length 180)
# [output] (tensor(29), 'IngredientsConfirmationIntent')

# In[6]:
acc = load_metric('accuracy')

def compute_metrics(eval_pred):
    logits, labels = eval_pred
    predictions = np.argmax(logits, axis=-1)
    accuracy = acc.compute(predictions=predictions, references=labels)
    return accuracy

def get_trainer(model):
    return Trainer(
        model=model,
        args=training_args,
        train_dataset=train_dataset,
        eval_dataset=val_dataset,
        compute_metrics=compute_metrics,
    )

training_args = TrainingArguments(
    output_dir='roberta-based',
    do_train=True,
    do_eval=True,
    evaluation_strategy='epoch',
    save_strategy='epoch',
    logging_strategy='epoch',
    metric_for_best_model='accuracy',
    learning_rate=2e-5,
    num_train_epochs=5,
    weight_decay=0.01,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    load_best_model_at_end=True,
    disable_tqdm=False,
)

trainer = get_trainer(model)
# [stderr] FutureWarning: load_metric is deprecated and will be removed in the next major version of datasets.
#          Use 'evaluate.load' instead, from the new library Evaluate: https://huggingface.co/docs/evaluate
#          (plus a trust_remote_code notice; the accuracy metric was loaded from the local cache)

# In[7]:
trainer.train()
# [output]  Epoch   Training Loss   Validation Loss   Accuracy
#           1       1.733200        1.017632          0.799756
#           2       0.676700        0.734118          0.829060
#           3       0.446900        0.668322          0.847375
#           4       0.343500        0.640882          0.852259
#           5       0.282900        0.641061          0.857143
# [output] TrainOutput(global_step=925, training_loss=0.6966540857263513,
#          metrics={'train_runtime': 515.0261, 'train_samples_per_second': 57.434,
#                   'train_steps_per_second': 1.796, 'total_flos': 2736984690806400.0,
#                   'train_loss': 0.6966540857263513, 'epoch': 5.0})

# In[9]:
# to evaluate a previously trained checkpoint, uncomment the next line and fill in your checkpoint directory
# model = AutoModelForSequenceClassification.from_pretrained('./your-checkpoint-directory').eval()
test_dataset = IntentDataset(dataset_name, tokenizer, 'test')
trainer = get_trainer(model)
trainer.evaluate(eval_dataset=test_dataset)
# [stdout] Loaded Intent detection dataset. 842 examples. (test).
# [output] evaluation progress bar [27/27 00:04]
# [error]  ConnectionError: the executed version of this cell also called trainer.push_to_hub(), which failed with
#          MaxRetryError on https://huggingface.co/api/repos/create, caused by
#          "Failed to resolve 'huggingface.co' ([Errno -3] Temporary failure in name resolution)".

# Notebook metadata: kernel "ws2024", Python 3.10.14
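One side note on the FutureWarning emitted by the metrics cell: datasets.load_metric is deprecated in favor of the separate evaluate package (the warning itself points to evaluate.load). A minimal sketch of the equivalent compute_metrics, assuming evaluate is installed:

import numpy as np
import evaluate  # pip install evaluate

acc = evaluate.load("accuracy")

def compute_metrics(eval_pred):
    # Same logic as the notebook's compute_metrics, with evaluate.load in place of load_metric.
    logits, labels = eval_pred
    predictions = np.argmax(logits, axis=-1)
    return acc.compute(predictions=predictions, references=labels)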