alexkueck committed
Commit 702d21c · 1 Parent(s): f981623

Update app.py

Files changed (1): app.py (+4 -2)
app.py CHANGED
@@ -68,14 +68,16 @@ splittet = False
 ##############################################
 print ("Tokenizer")
 #tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-70b-chat-hf")
-tokenizer = AutoTokenizer.from_pretrained("TheBloke/Yi-34B-Chat-GGUF")
+#tokenizer = AutoTokenizer.from_pretrained("TheBloke/Yi-34B-Chat-GGUF")
+tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
 
 ##############################################
 # inference client
 ##############################################
 print ("Inf.Client")
 #client = InferenceClient("https://api-inference.huggingface.co/models/meta-llama/Llama-2-70b-chat-hf")
-client = InferenceClient(model="TheBloke/Yi-34B-Chat-GGUF")
+client = InferenceClient("https://ybdhvwle4ksrawzo.eu-west-1.aws.endpoints.huggingface.cloud")
+#client = InferenceClient(model="TheBloke/Yi-34B-Chat-GGUF")
 
 
 #################################################
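
The commit points client at a dedicated Hugging Face Inference Endpoint while loading a hub tokenizer (Mistral-7B-Instruct-v0.1) for prompt formatting. The rest of app.py is not part of this hunk, so the following is only a minimal sketch of how the two objects from the diff might be combined; the example message and generation parameters are assumptions, not code from the repository.

from huggingface_hub import InferenceClient
from transformers import AutoTokenizer

# Same objects as in the diff: hub tokenizer for prompt formatting,
# InferenceClient pointed at the dedicated endpoint URL.
tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
client = InferenceClient("https://ybdhvwle4ksrawzo.eu-west-1.aws.endpoints.huggingface.cloud")

# Hypothetical usage: build an instruct-style chat prompt and send it to the endpoint.
messages = [{"role": "user", "content": "What is a tokenizer?"}]  # example input, not from app.py
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
answer = client.text_generation(prompt, max_new_tokens=256)  # generation parameters are assumptions
print(answer)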