---
library_name: transformers
license: apache-2.0
language:
- ko
---
# Usage

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Hugging Face Hub repository id of the model this card describes.
path = "mssma/ko-solar-10.7b-v0.1b"

# Load the causal-LM weights in half precision and let the library place
# layers across available devices automatically.
# NOTE(review): device_map='auto' presumably requires `accelerate` to be
# installed — confirm before publishing the card.
model = AutoModelForCausalLM.from_pretrained(
    path,
    return_dict=True,
    torch_dtype=torch.float16,
    device_map="auto",
)

# Tokenizer shipped alongside the model in the same repository.
tokenizer = AutoTokenizer.from_pretrained(path)
```