Jayyydyyy and salilsdesai committed
Commit 1cf16a5
1 Parent(s): 6547184

Fix setting model device for CUDA (#1)


- Fix setting model device for CUDA (055b0e2c083f8797659f64c18ac1b3911846929d)


Co-authored-by: Salil Desai <salilsdesai@users.noreply.huggingface.co>

Files changed (1)
  1. app.py +4 -2
app.py CHANGED
@@ -2,9 +2,11 @@ import gradio as gr
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
 import torch
 
-model = AutoModelForSeq2SeqLM.from_pretrained("Jayyydyyy/m2m100_418m_tokipona")
-tokenizer = AutoTokenizer.from_pretrained("facebook/m2m100_418M")
 device = "cuda:0" if torch.cuda.is_available() else "cpu"
+
+model = AutoModelForSeq2SeqLM.from_pretrained("Jayyydyyy/m2m100_418m_tokipona").to(device)
+tokenizer = AutoTokenizer.from_pretrained("facebook/m2m100_418M")
+
 LANG_CODES = {
     "English":"en",
     "toki pona":"tl"