Kevin Fink committed on
Commit 1554413 · 1 Parent(s): c7cf3c2
Files changed (1):
  1. app.py (+3 -3)
app.py CHANGED
@@ -1,6 +1,6 @@
 import spaces
 import gradio as gr
-from transformers import Trainer, TrainingArguments, AutoTokenizer, AutoModelForSeq2SeqLM
+from transformers import Trainer, TrainingArguments, AutoTokenizer, TFAutoModelForSeq2SeqLM
 from transformers import DataCollatorForSeq2Seq
 from datasets import load_dataset, concatenate_datasets, load_from_disk
 import traceback
@@ -19,7 +19,7 @@ lora_config = LoraConfig(
     lora_dropout=0.1, # Dropout for LoRA layers
     bias="none" # Bias handling
 )
-model = AutoModelForSeq2SeqLM.from_pretrained('google/t5-efficient-tiny', num_labels=2, force_download=True)
+model = TFAutoModelForSeq2SeqLM.from_pretrained('google/t5-efficient-tiny', num_labels=2, force_download=True)
 model = get_peft_model(model, lora_config)
 model.gradient_checkpointing_enable()
 model_save_path = '/data/lora_finetuned_model' # Specify your desired save path
@@ -145,7 +145,7 @@ def predict(text):
 
 @spaces.GPU(duration=120)
 def run_train(dataset_name, hub_id, api_key, num_epochs, batch_size, lr, grad):
-    model = AutoModelForSeq2SeqLM.from_pretrained('/data/lora_finetuned_model', num_labels=2)
+    model = TFAutoModelForSeq2SeqLM.from_pretrained('/data/lora_finetuned_model', num_labels=2)
     result = fine_tune_model(model, dataset_name, hub_id, api_key, num_epochs, batch_size, lr, grad)
     return result
 # Create Gradio interface
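
For context, a minimal sketch of the class swap above, shown in isolation rather than as the app's actual flow. The from_pt=True flag is an assumption covering the case where the Hub checkpoint only ships PyTorch weights (Transformers converts them on the fly, which requires torch to be installed). Note that get_peft_model and gradient_checkpointing_enable in the surrounding app.py are PyTorch-side APIs, so they are not reproduced here.

# Hedged sketch: load the T5 checkpoint with the TensorFlow auto class
# this commit switches to.
from transformers import AutoTokenizer, TFAutoModelForSeq2SeqLM

tokenizer = AutoTokenizer.from_pretrained('google/t5-efficient-tiny')
# from_pt=True assumes only PyTorch weights exist on the Hub; drop it if a
# TensorFlow checkpoint (tf_model.h5) is available.
model = TFAutoModelForSeq2SeqLM.from_pretrained('google/t5-efficient-tiny', from_pt=True)

# Saving with the TF class writes TensorFlow weights, so the later
# TFAutoModelForSeq2SeqLM.from_pretrained('/data/lora_finetuned_model')
# call in run_train can reload them.
model.save_pretrained('/data/lora_finetuned_model')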