Text Generation · Transformers · PyTorch · English · crystalcoder · llm · code · custom_code · Eval Results
Tianhua committed
Commit f2ff0be
1 Parent(s): a24e68d

Update README.md

Files changed (1):
  1. README.md +2 -2
README.md CHANGED
@@ -109,8 +109,8 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer

 device = "cuda:0" if torch.cuda.is_available() else "cpu"
-tokenizer = AutoTokenizer.from_pretrained("/mnt/ssd1/manaslu/checkpoints/hf/phase2a/CrystalCoder_phase2a_checkpoint_1600_to_hf", trust_remote_code=True)
-model = AutoModelForCausalLM.from_pretrained("/mnt/ssd1/manaslu/checkpoints/hf/phase2a/CrystalCoder_phase2a_checkpoint_1600_to_hf", trust_remote_code=True).to(device)
+tokenizer = AutoTokenizer.from_pretrained("LLM360/CrystalChat", trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained("LLM360/CrystalChat", trust_remote_code=True).to(device)

 prompt = '<s> <|sys_start|> You are an AI assistant. You will be given a task. You must generate a detailed and long answer. <|sys_end|> <|im_start|> Write a python function that takes a list of integers and returns the squared sum of the list. <|im_end|>'
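For context, the snippet above only loads the model and defines a prompt. Below is a minimal sketch of how it can be completed into a runnable example with the updated LLM360/CrystalChat repo ID. The generation step and its arguments (max_new_tokens, skip_special_tokens) are illustrative assumptions and are not part of this commit.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

device = "cuda:0" if torch.cuda.is_available() else "cpu"

# Load tokenizer and model from the Hub repo referenced by the updated README lines.
tokenizer = AutoTokenizer.from_pretrained("LLM360/CrystalChat", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("LLM360/CrystalChat", trust_remote_code=True).to(device)

# Prompt format taken verbatim from the README snippet in this diff.
prompt = '<s> <|sys_start|> You are an AI assistant. You will be given a task. You must generate a detailed and long answer. <|sys_end|> <|im_start|> Write a python function that takes a list of integers and returns the squared sum of the list. <|im_end|>'

# Tokenize the prompt and move the tensors to the model's device.
inputs = tokenizer(prompt, return_tensors="pt").to(device)

# Generate a completion; max_new_tokens=256 is an assumed value, not from the README.
outputs = model.generate(**inputs, max_new_tokens=256)

# Decode the full sequence, dropping special tokens for readability.
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```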