Commit 7aa8d44 · Parent(s): c6076be
Update README.md
README.md CHANGED
@@ -25,10 +25,9 @@ import torch
 from peft import PeftModel, PeftConfig
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
-peft_model_id = "hackathon-somos-nlp-2023/
+peft_model_id = "hackathon-somos-nlp-2023/salsapaca-native"
 config = PeftConfig.from_pretrained(peft_model_id)
 model = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, return_dict=True, load_in_8bit=True, device_map='auto')
-# tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
 tokenizer = AutoTokenizer.from_pretrained(peft_model_id)
 
 # Load the Lora model
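For reference, the README snippet as it reads after this commit would look roughly like the sketch below. The final `PeftModel.from_pretrained` call is an assumed continuation (the hunk ends at the "# Load the Lora model" comment); it follows the standard PEFT workflow rather than anything shown in this diff.

```python
import torch
from peft import PeftModel, PeftConfig
from transformers import AutoModelForCausalLM, AutoTokenizer

peft_model_id = "hackathon-somos-nlp-2023/salsapaca-native"
config = PeftConfig.from_pretrained(peft_model_id)

# Base model loaded in 8-bit and sharded automatically across available devices
model = AutoModelForCausalLM.from_pretrained(
    config.base_model_name_or_path,
    return_dict=True,
    load_in_8bit=True,
    device_map="auto",
)

# As of this commit, the tokenizer comes from the adapter repo, not the base model
tokenizer = AutoTokenizer.from_pretrained(peft_model_id)

# Load the Lora model
# (assumed continuation of the snippet; standard PEFT API for attaching an adapter)
model = PeftModel.from_pretrained(model, peft_model_id)
```

The commit itself makes two small fixes: it completes the previously truncated `peft_model_id` string as `"hackathon-somos-nlp-2023/salsapaca-native"`, and it drops the commented-out line that loaded the tokenizer from the base model, leaving only the tokenizer loaded from the adapter repo.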