jmprcp gqd committed on
Commit
d97a456
1 Parent(s): 9be1a29

use standard quotes in usage example (#1)


- use standard quotes in readme (a83826c8d92038625336b2cafb1cfd4e89fa813b)


Co-authored-by: God <gqd@users.noreply.huggingface.co>

Files changed (1)
  1. README.md +3 -3
README.md CHANGED
@@ -57,14 +57,14 @@ Here's how you can run the model using the `pipeline()` function from 🤗 Trans
  import torch
  from transformers import pipeline

- pipe = pipeline(“text-generation”, model=“Unbabel/TowerInstruct-v0.1“, torch_dtype=torch.bfloat16, device_map=“auto”)
+ pipe = pipeline("text-generation", model="Unbabel/TowerInstruct-v0.1", torch_dtype=torch.bfloat16, device_map="auto")
  # We use the tokenizer’s chat template to format each message - see https://huggingface.co/docs/transformers/main/en/chat_templating
  messages = [
- {“role”: “user”, “content”: “Translate the following text from Portuguese into English.\nPortuguese: Um grupo de investigadores lançou um novo modelo para tarefas relacionadas com tradução.\nEnglish:“},
+ {"role": "user", "content": "Translate the following text from Portuguese into English.\nPortuguese: Um grupo de investigadores lançou um novo modelo para tarefas relacionadas com tradução.\nEnglish:"},
  ]
  prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
  outputs = pipe(prompt, max_new_tokens=256, do_sample=False)
- print(outputs[0][“generated_text”])
+ print(outputs[0]["generated_text"])
  # <|im_start|>user
  # Translate the following text from Portuguese into English.
  # Portuguese: Um grupo de investigadores lançou um novo modelo para tarefas relacionadas com tradução.
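For reference, here is the usage example as it reads after this commit, assembled from the hunk above into a runnable snippet (assuming `torch` and `transformers` are installed; the example's remaining output comments fall outside this hunk):

import torch
from transformers import pipeline

# Load the model via the text-generation pipeline (bfloat16 weights, automatic device placement)
pipe = pipeline("text-generation", model="Unbabel/TowerInstruct-v0.1", torch_dtype=torch.bfloat16, device_map="auto")
# We use the tokenizer's chat template to format each message - see https://huggingface.co/docs/transformers/main/en/chat_templating
messages = [
    {"role": "user", "content": "Translate the following text from Portuguese into English.\nPortuguese: Um grupo de investigadores lançou um novo modelo para tarefas relacionadas com tradução.\nEnglish:"},
]
prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
outputs = pipe(prompt, max_new_tokens=256, do_sample=False)
print(outputs[0]["generated_text"])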