Heng666 committed on
Commit
19b814b
·
verified ·
1 Parent(s): 6369cac

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -6
app.py CHANGED
@@ -7,16 +7,17 @@ from threading import Thread
7
 
8
  # Loading the tokenizer and model from Hugging Face's model hub.
9
  # model_name_or_path = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
10
- model_name_or_path = "Flmc/DISC-MedLLM"
11
  # tokenizer = AutoTokenizer.from_pretrained(model_name_or_path,trust_remote_code=True)
12
  # model = AutoModelForCausalLM.from_pretrained(model_name,trust_remote_code=True)
13
- # model = AutoModel.from_pretrained(model_name_or_path, trust_remote_code=True)
14
 
 
 
 
 
15
 
16
- tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=False, trust_remote_code=True)
17
- model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map="auto", torch_dtype=torch.float16, trust_remote_code=True)
18
- model.generation_config = GenerationConfig.from_pretrained(model_name_or_path)
19
-
20
 
21
  # using CUDA for an optimal experience
22
  device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 
7
 
8
  # Loading the tokenizer and model from Hugging Face's model hub.
9
  # model_name_or_path = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
 
10
  # tokenizer = AutoTokenizer.from_pretrained(model_name_or_path,trust_remote_code=True)
11
  # model = AutoModelForCausalLM.from_pretrained(model_name,trust_remote_code=True)
 
12
 
13
+ # model_name_or_path = "Flmc/DISC-MedLLM"
14
+ # tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=False, trust_remote_code=True)
15
+ # model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map="auto", torch_dtype=torch.float16, trust_remote_code=True)
16
+ # model.generation_config = GenerationConfig.from_pretrained(model_name_or_path)
17
 
18
+ model_name_or_path = "scutcyr/BianQue-2"
19
+ tokenizer = AutoTokenizer.from_pretrained(model_name_or_path,trust_remote_code=True)
20
+ model = AutoModel.from_pretrained(model_name_or_path, trust_remote_code=True)
 
21
 
22
  # using CUDA for an optimal experience
23
  device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')