Fix setting model device for CUDA

#1
by salilsdesai - opened
Files changed (1):
  1. app.py (+4, -2)
app.py CHANGED
@@ -2,9 +2,11 @@ import gradio as gr
2
  from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
3
  import torch
4
 
5
- model = AutoModelForSeq2SeqLM.from_pretrained("Jayyydyyy/m2m100_418m_tokipona")
6
- tokenizer = AutoTokenizer.from_pretrained("facebook/m2m100_418M")
7
  device = "cuda:0" if torch.cuda.is_available() else "cpu"
 
 
 
 
8
  LANG_CODES = {
9
  "English":"en",
10
  "toki pona":"tl"
 
2
  from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
3
  import torch
4
 
 
 
5
  device = "cuda:0" if torch.cuda.is_available() else "cpu"
6
+
7
+ model = AutoModelForSeq2SeqLM.from_pretrained("Jayyydyyy/m2m100_418m_tokipona").to(device)
8
+ tokenizer = AutoTokenizer.from_pretrained("facebook/m2m100_418M")
9
+
10
  LANG_CODES = {
11
  "English":"en",
12
  "toki pona":"tl"