Kevin Fink committed
Commit c7cf3c2 · 1 Parent(s): 5912a26
Files changed (1):
  1. app.py +13 -12
app.py CHANGED
@@ -13,7 +13,17 @@ from peft import get_peft_model, LoraConfig
 
 os.environ['HF_HOME'] = '/data/.huggingface'
 
-
+lora_config = LoraConfig(
+    r=16,  # Rank of the low-rank adaptation
+    lora_alpha=32,  # Scaling factor
+    lora_dropout=0.1,  # Dropout for LoRA layers
+    bias="none"  # Bias handling
+)
+model = AutoModelForSeq2SeqLM.from_pretrained('google/t5-efficient-tiny', num_labels=2, force_download=True)
+model = get_peft_model(model, lora_config)
+model.gradient_checkpointing_enable()
+model_save_path = '/data/lora_finetuned_model'  # Specify your desired save path
+model.save_pretrained(model_save_path)
 
 
 def fine_tune_model(model, dataset_name, hub_id, api_key, num_epochs, batch_size, lr, grad):
@@ -135,20 +145,11 @@ def predict(text):
 
 @spaces.GPU(duration=120)
 def run_train(dataset_name, hub_id, api_key, num_epochs, batch_size, lr, grad):
-    lora_config = LoraConfig(
-        r=16,  # Rank of the low-rank adaptation
-        lora_alpha=32,  # Scaling factor
-        lora_dropout=0.1,  # Dropout for LoRA layers
-        bias="none"  # Bias handling
-    )
-    model = AutoModelForSeq2SeqLM.from_pretrained('google/t5-efficient-tiny', num_labels=2, force_download=True)
-    model = get_peft_model(model, lora_config)
-    model.gradient_checkpointing_enable()
+    model = AutoModelForSeq2SeqLM.from_pretrained('/data/lora_finetuned_model', num_labels=2)
     result = fine_tune_model(model, dataset_name, hub_id, api_key, num_epochs, batch_size, lr, grad)
     return result
 # Create Gradio interface
-try:
-    model = AutoModelForSeq2SeqLM.from_pretrained('google/t5-efficient-tiny-nh8'.strip(), num_labels=2, force_download=True)
+try:
     iface = gr.Interface(
         fn=run_train,
         inputs=[
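
Note: the net effect of this commit is to hoist the LoRA setup out of the GPU-scoped run_train into module scope, save the wrapped model to /data/lora_finetuned_model once at startup, and have run_train reload it from local disk. Since save_pretrained on a PEFT-wrapped model writes only the adapter (adapter_config.json plus adapter weights), not the full base model, here is a sketch of the explicit save/reload round trip, assuming current transformers/peft APIs and the paths used in the commit:

from transformers import AutoModelForSeq2SeqLM
from peft import LoraConfig, get_peft_model, PeftModel

# Wrap the base model with LoRA adapters, as the commit does at module scope.
base = AutoModelForSeq2SeqLM.from_pretrained('google/t5-efficient-tiny')
lora = get_peft_model(base, LoraConfig(r=16, lora_alpha=32, lora_dropout=0.1, bias="none"))

# Writes only the adapter config and adapter weights to the directory.
lora.save_pretrained('/data/lora_finetuned_model')

# Explicit reload: rebuild the base model, then attach the saved adapter.
base = AutoModelForSeq2SeqLM.from_pretrained('google/t5-efficient-tiny')
reloaded = PeftModel.from_pretrained(base, '/data/lora_finetuned_model')

Recent transformers releases can also resolve an adapter directory passed straight to AutoModelForSeq2SeqLM.from_pretrained when peft is installed, which is the route run_train takes above; the PeftModel form just makes the base-model dependency explicit.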
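For context, @spaces.GPU(duration=120) is the Hugging Face Spaces ZeroGPU decorator: a GPU is attached only while the decorated call runs, for at most 120 seconds here. That is presumably why the expensive setup (download, LoRA wrapping, save) moved to module scope while only the cheap local reload stays inside the decorated function. A minimal sketch of the pattern, with the training body delegated to the file's existing fine_tune_model:

import spaces
from transformers import AutoModelForSeq2SeqLM

@spaces.GPU(duration=120)  # GPU is attached only for the duration of this call
def run_train(dataset_name, hub_id, api_key, num_epochs, batch_size, lr, grad):
    # Cheap local reload of the adapter saved at startup; no network fetch.
    model = AutoModelForSeq2SeqLM.from_pretrained('/data/lora_finetuned_model')
    return fine_tune_model(model, dataset_name, hub_id, api_key,
                           num_epochs, batch_size, lr, grad)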