torVik committed
Commit 205caac · verified · 1 Parent(s): af371b5

Update app.py

Files changed (1)
  1. app.py  +1 -1
app.py CHANGED
@@ -30,7 +30,7 @@ MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
 
 # Debugging: GPU check passed, loading model
 if torch.cuda.is_available():
-    model_id = "INSAIT-Institute/BgGPT-Gemma-2-27B-IT-v1.0"
+    model_id = "INSAIT-Institute/BgGPT-Gemma-2-9B-IT-v1.0"
     try:
         print("Loading model...")
         model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto", token=HF_TOKEN)
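For context, a minimal sketch of the model-loading block this commit touches, assuming a standard transformers setup: HF_TOKEN, MAX_INPUT_TOKEN_LENGTH, the CUDA check, and the from_pretrained call come from the diff, while the tokenizer loading and the error/no-GPU branches are illustrative assumptions rather than the author's actual app.py.

# Sketch of the surrounding model-loading logic (assumptions noted in comments).
import os

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

HF_TOKEN = os.getenv("HF_TOKEN")
MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))

if torch.cuda.is_available():
    # The commit swaps the 27B checkpoint for the 9B one; same loading path.
    model_id = "INSAIT-Institute/BgGPT-Gemma-2-9B-IT-v1.0"
    try:
        print("Loading model...")
        # Tokenizer loading is assumed; the diff only shows the model call.
        tokenizer = AutoTokenizer.from_pretrained(model_id, token=HF_TOKEN)
        model = AutoModelForCausalLM.from_pretrained(
            model_id,
            torch_dtype=torch.float16,
            device_map="auto",
            token=HF_TOKEN,
        )
    except Exception as exc:
        # Assumed error handling for illustration.
        print(f"Model loading failed: {exc}")
else:
    # Assumed fallback; the original only shows the GPU branch.
    print("No GPU available; model will not be loaded.")

In float16, weights alone take roughly 2 bytes per parameter, so the 9B checkpoint needs on the order of 18 GB versus about 54 GB for the 27B one, which is a plausible reason for the swap on a single-GPU Space.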