Commit: Update README.md

File changed: README.md
@@ -45,6 +45,7 @@ loraアダプターになります。
 ## Uses
 使用方法は以下です。
 <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
+```
 from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
 import torch
 from peft import PeftModel, PeftConfig
@@ -99,6 +100,7 @@ outputs = model.generate(
 )
 
 response = tokenizer.decode(outputs[0][input_ids.shape[1]:], skip_special_tokens=True)
+```
 ### Direct Use
 
 <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->