Update README.md
README.md (CHANGED)
@@ -29,15 +29,14 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 
 
 
-tokenizer = AutoTokenizer.from_pretrained("
-model = AutoModelForCausalLM.from_pretrained("
+tokenizer = AutoTokenizer.from_pretrained("/mnt/hwfile/ai4chem/CKPT/longcot_sft_llama3.1_ZD_11_29_1/")
+model = AutoModelForCausalLM.from_pretrained("/mnt/hwfile/ai4chem/CKPT/longcot_sft_llama3.1_ZD_11_29_1/",device_map='auto')
 
 
 
 template = "<start_of_father_id>-1<end_of_father_id><start_of_local_id>0<end_of_local_id><start_of_thought><problem>{content}<end_of_thought><start_of_rating><positive_rating><end_of_rating>\n<start_of_father_id>0<end_of_father_id><start_of_local_id>1<end_of_local_id><start_of_thought><expansion>"
 
-def llama_o1_template(
-    query = data['query']
+def llama_o1_template(query):
     text = template.format(content=query)
     return text
 
@@ -51,7 +50,6 @@ def batch_predict(input_texts):
     assitant_responses = [item for i,item in enumerate(response_texts)]
     return assitant_responses
 
-
 i = 'If Diana needs to bike 10 miles to reach home and she can bike at a speed of 3 mph for two hours before getting tired, and then at a speed of 1 mph until she reaches home, how long will it take her to get home?'
 input_texts = [llama_o1_template(i)]
 assitant_responses = batch_predict(input_texts)