Update README.md
README.md CHANGED
@@ -34,10 +34,10 @@ Baichuan-13B-Chat is the aligned version in the Baichuan-13B series of models, a
 如下是一个使用Baichuan-13B-Chat进行对话的示例,正确输出为"乔戈里峰。世界第二高峰———乔戈里峰西方登山者称其为k2峰,海拔高度是8611米,位于喀喇昆仑山脉的中巴边境上"
 ```python
 import torch
-from transformers import
+from transformers import AutoModelForCausalLM, AutoTokenizer
 from transformers.generation.utils import GenerationConfig
 tokenizer = AutoTokenizer.from_pretrained("baichuan-inc/Baichuan-13B-Chat", use_fast=False, trust_remote_code=True)
-model =
+model = AutoModelForCausalLM.from_pretrained("baichuan-inc/Baichuan-13B-Chat", device_map="auto", torch_dtype=torch.float16, trust_remote_code=True)
 model.generation_config = GenerationConfig.from_pretrained("baichuan-inc/Baichuan-13B-Chat")
 messages = []
 messages.append({"role": "user", "content": "世界上第二高的山峰是哪座"})
@@ -48,10 +48,10 @@ print(response)
 Here is an example of a conversation using Baichuan-13B-Chat; the correct output is "K2. The world's second highest peak - K2, also known as Mount Godwin-Austen or Chhogori, with an altitude of 8611 meters, is located on the China-Pakistan border in the Karakoram Range."
 ```python
 import torch
-from transformers import
+from transformers import AutoModelForCausalLM, AutoTokenizer
 from transformers.generation.utils import GenerationConfig
 tokenizer = AutoTokenizer.from_pretrained("baichuan-inc/Baichuan-13B-Chat", use_fast=False, trust_remote_code=True)
-model =
+model = AutoModelForCausalLM.from_pretrained("baichuan-inc/Baichuan-13B-Chat", device_map="auto", torch_dtype=torch.float16, trust_remote_code=True)
 model.generation_config = GenerationConfig.from_pretrained("baichuan-inc/Baichuan-13B-Chat")
 messages = []
 messages.append({"role": "user", "content": "Which mountain is the second highest one in the world?"})
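
For convenience, here is the updated example assembled end to end from the hunks above. This is a minimal sketch: the final `response = model.chat(tokenizer, messages)` and `print(response)` lines are assumed from the `print(response)` context shown in the second hunk header; `chat` is a custom helper shipped with the model's remote code, so it is only available when loading with `trust_remote_code=True`.

```python
# Minimal sketch assembled from the diff above.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from transformers.generation.utils import GenerationConfig

# Load the slow tokenizer and the fp16 model, placing weights across available devices.
tokenizer = AutoTokenizer.from_pretrained(
    "baichuan-inc/Baichuan-13B-Chat", use_fast=False, trust_remote_code=True
)
model = AutoModelForCausalLM.from_pretrained(
    "baichuan-inc/Baichuan-13B-Chat",
    device_map="auto",
    torch_dtype=torch.float16,
    trust_remote_code=True,
)
model.generation_config = GenerationConfig.from_pretrained("baichuan-inc/Baichuan-13B-Chat")

# Build the conversation as a list of role/content messages and ask for a reply.
messages = []
messages.append({"role": "user", "content": "Which mountain is the second highest one in the world?"})
response = model.chat(tokenizer, messages)  # assumed helper from the repo's remote code
print(response)
```

If the model behaves as documented, the response should name K2 (Mount Godwin-Austen / Chhogori, 8611 m).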