SwastikM committed
Commit edb13d1
1 Parent(s): cb71146

Update README.md

Files changed (1)
  1. README.md +3 -3
README.md CHANGED
@@ -62,9 +62,9 @@ instruction = """"Help me set up my daily to-do list!""""
 from peft import PeftModel, PeftConfig
 from transformers import AutoModelForCausalLM
 
-config = PeftConfig.from_pretrained("SwastikM/Llama-2-7B-Chat-text2code")
-model = AutoModelForCausalLM.from_pretrained("TheBloke/Llama-2-7b-Chat-GPTQ")
-model = PeftModel.from_pretrained(model, "SwastikM/Llama-2-7B-Chat-text2code")
+config = PeftConfig.from_pretrained("SwastikM/Llama-2-7B-Chat-text2code") #PEFT Config
+model = AutoModelForCausalLM.from_pretrained("TheBloke/Llama-2-7b-Chat-GPTQ") #Loading the Base Model
+model = PeftModel.from_pretrained(model, "SwastikM/Llama-2-7B-Chat-text2code") #Combining Trained Adapter with Base Model
 tokenizer = AutoTokenizer.from_pretrained("SwastikM/Llama-2-7B-Chat-text2code")
 
 inputs = tokenizer(instruction, return_tensors="pt").input_ids.to('cuda')
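For context, below is a minimal self-contained sketch of the full inference flow these changed lines belong to. It is illustrative only: the `AutoTokenizer` import, `device_map="auto"`, and the `generate`/decode settings are assumptions not taken from this diff, and loading the GPTQ base model additionally assumes a CUDA GPU plus the GPTQ runtime dependencies (e.g. `optimum` with `auto-gptq` or `gptqmodel`).

```python
# Minimal sketch of the README snippet, end to end (settings below are illustrative).
from peft import PeftModel, PeftConfig
from transformers import AutoModelForCausalLM, AutoTokenizer

instruction = "Help me set up my daily to-do list!"

config = PeftConfig.from_pretrained("SwastikM/Llama-2-7B-Chat-text2code")       # PEFT config (loaded as in the README)
model = AutoModelForCausalLM.from_pretrained(
    "TheBloke/Llama-2-7b-Chat-GPTQ",
    device_map="auto",                                                           # assumption: place the quantized base model on GPU
)
model = PeftModel.from_pretrained(model, "SwastikM/Llama-2-7B-Chat-text2code")   # attach the trained adapter to the base model
tokenizer = AutoTokenizer.from_pretrained("SwastikM/Llama-2-7B-Chat-text2code")

inputs = tokenizer(instruction, return_tensors="pt").input_ids.to("cuda")
outputs = model.generate(inputs, max_new_tokens=256)                             # illustrative generation length
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```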