Update README.md
```diff
--- a/README.md
+++ b/README.md
@@ -58,15 +58,15 @@ Em apoio aos países de língua portuguesa.
 import torch
 from transformers import pipeline
 
-
+boana = pipeline('text-generation', model='lrds-code/boana-7b-instruct', torch_dtype=torch.bfloat16, device_map='auto')
 
 messages = [{'role':'system',
              'content':''},
             {'role':'user',
              'content':'Quantos planetas existem no sistema solar?'}]
 
-prompt =
-outputs =
+prompt = boana.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+outputs = boana(prompt, max_new_tokens=256, do_sample=False, temperature=0, top_k=50, top_p=0.95)
 print(outputs[0]['generated_text'])
 
 # <s>[INST] <<SYS>>
```
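For reference, here is the updated snippet assembled into a single runnable script, a minimal sketch mirroring the new README lines above (only the comments are added; with `do_sample=False` decoding is greedy, so `temperature`, `top_k`, and `top_p` have no practical effect):

```python
import torch
from transformers import pipeline

# Load the instruction-tuned Boana model as a text-generation pipeline.
boana = pipeline('text-generation',
                 model='lrds-code/boana-7b-instruct',
                 torch_dtype=torch.bfloat16,
                 device_map='auto')

messages = [{'role': 'system', 'content': ''},
            {'role': 'user', 'content': 'Quantos planetas existem no sistema solar?'}]

# Render the chat messages into the model's prompt template
# (the [INST] <<SYS>> format shown in the README).
prompt = boana.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

# Generate a response; generation parameters are kept exactly as in the README.
outputs = boana(prompt, max_new_tokens=256, do_sample=False, temperature=0, top_k=50, top_p=0.95)
print(outputs[0]['generated_text'])
```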