Kukedlc commited on
Commit
506491e
·
verified ·
1 Parent(s): 43fa2e4

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +3 -2
README.md CHANGED
@@ -68,11 +68,12 @@ model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, device_map='cuda:0', lo
68
  tok = AutoTokenizer.from_pretrained(MODEL_NAME)
69
 
70
  # Inference
71
- inputs = tok(["I want you to generate a theory that unites quantum mechanics with the theory of relativity and cosmic consciousness"], return_tensors="pt").to('cuda')
 
72
  streamer = TextStreamer(tok)
73
 
74
  # Despite returning the usual output, the streamer will also print the generated text to stdout.
75
- _ = model.generate(**inputs, streamer=streamer, max_new_tokens=512)
76
 
77
  ```
78
  ## 💻 Usage - Classic
 
68
  tok = AutoTokenizer.from_pretrained(MODEL_NAME)
69
 
70
  # Inference
71
+ prompt = "I want you to generate a theory that unites quantum mechanics with the theory of relativity and cosmic consciousness\n"
72
+ inputs = tok([prompt], return_tensors="pt").to('cuda')
73
  streamer = TextStreamer(tok)
74
 
75
  # Despite returning the usual output, the streamer will also print the generated text to stdout.
76
+ _ = model.generate(**inputs, streamer=streamer, max_new_tokens=512, do_sample=True, num_beams=1, top_p=0.9, temperature=0.7)
77
 
78
  ```
79
  ## 💻 Usage - Classic