lrds-code committed on
Commit
dc3e94b
1 Parent(s): 50eb170

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +3 -3
README.md CHANGED
@@ -58,15 +58,15 @@ Em apoio aos países de língua portuguesa.
58
  import torch
59
  from transformers import pipeline
60
 
61
- samba = pipeline('text-generation', model='lrds-code/samba-1.1B', torch_dtype=torch.bfloat16, device_map='auto')
62
 
63
  messages = [{'role':'system',
64
  'content':''},
65
  {'role':'user',
66
  'content':'Quantos planetas existem no sistema solar?'}]
67
 
68
- prompt = samba.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
69
- outputs = samba(prompt, max_new_tokens=256, do_sample=False, temperature=0.1, top_k=50, top_p=0.95, repetition_penalty=1.1, do_sample=False)
70
  print(outputs[0]['generated_text'])
71
 
72
  # <s>[INST] <<SYS>>
 
58
  import torch
59
  from transformers import pipeline
60
 
61
+ boana = pipeline('text-generation', model='lrds-code/boana-7b-instruct', torch_dtype=torch.bfloat16, device_map='auto')
62
 
63
  messages = [{'role':'system',
64
  'content':''},
65
  {'role':'user',
66
  'content':'Quantos planetas existem no sistema solar?'}]
67
 
68
+ prompt = boana.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
69
+ outputs = boana(prompt, max_new_tokens=256, do_sample=False, temperature=0, top_k=50, top_p=0.95)
70
  print(outputs[0]['generated_text'])
71
 
72
  # <s>[INST] <<SYS>>