Kevin Fink committed
Commit 379c443 · 1 Parent(s): d202762
dev
app.py CHANGED
@@ -7,14 +7,14 @@ import traceback

import os
-
+

@spaces.GPU
def fine_tune_model(model_name, dataset_name, hub_id, api_key, num_epochs, batch_size, lr, grad):
    try:
        #login(api_key.strip())
        # Load the model and tokenizer
-        model = AutoModelForSeq2SeqLM.from_pretrained(
+        model = AutoModelForSeq2SeqLM.from_pretrained('google/t5-efficient-tiny-nh8', num_labels=2)


        # Set training arguments
@@ -45,7 +45,7 @@ def fine_tune_model(model_name, dataset_name, hub_id, api_key, num_epochs, batch
        max_length = 128
        # Load the dataset
        dataset = load_dataset(dataset_name.strip())
-        tokenizer = AutoTokenizer.from_pretrained(
+        tokenizer = AutoTokenizer.from_pretrained('google/t5-efficient-tiny-nh8')
        # Tokenize the dataset
        def tokenize_function(examples):
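For context, a minimal sketch of what the changed lines amount to once this commit is applied: fine_tune_model still accepts a model_name argument, but both the model and the tokenizer are now loaded from the hardcoded 'google/t5-efficient-tiny-nh8' checkpoint. The snippet below is illustrative only, assumes the transformers library is installed, and omits the surrounding dataset loading and Trainer setup from app.py; the names checkpoint and example are not from the original file.

# Minimal sketch (assumption: transformers with a Torch backend is available).
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

checkpoint = 'google/t5-efficient-tiny-nh8'  # hardcoded in this commit

# Extra keyword arguments such as num_labels=2 are forwarded to the model
# config; they do not alter the seq2seq generation head itself.
model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint, num_labels=2)
tokenizer = AutoTokenizer.from_pretrained(checkpoint)

# Tokenization mirrors the max_length = 128 used later in the diff.
example = tokenizer("summarize: a short test input",
                    max_length=128, truncation=True, return_tensors="pt")
print(model.config.model_type, example["input_ids"].shape)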