Update app.py
app.py
CHANGED
@@ -3,7 +3,7 @@ import gradio as gr
 
 
 def tokenize(input_text):
-    llama_tokens =
+    llama_tokens = llama_tokenizer(input_text, add_special_tokens=True)["input_ids"]
     mistral_tokens = mistral_tokenizer(input_text, add_special_tokens=True)["input_ids"]
     gpt2_tokens = gpt2_tokenizer(input_text, add_special_tokens=True)["input_ids"]
     gpt_neox_tokens = gpt_neox_tokenizer(input_text, add_special_tokens=True)["input_ids"]
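For context, the commit completes the previously truncated llama_tokens assignment so that all four tokenizers are applied to the input text. The tokenizer objects themselves are defined elsewhere in app.py, outside this hunk. The sketch below is a hypothetical reconstruction of how the full Space could be wired up, assuming the tokenizers are loaded with transformers' AutoTokenizer; the checkpoint names, the token-count output format, and the Gradio interface are illustrative assumptions, not part of the diff.

# Hypothetical sketch of the surrounding app.py; checkpoint names and
# interface wiring are assumptions, only tokenize() appears in the diff.
import gradio as gr
from transformers import AutoTokenizer

# Assumed tokenizer setup (checkpoints are illustrative, not from the diff)
llama_tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")
mistral_tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-v0.1")
gpt2_tokenizer = AutoTokenizer.from_pretrained("gpt2")
gpt_neox_tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neox-20b")


def tokenize(input_text):
    # Encode the same text with each tokenizer, including special tokens
    llama_tokens = llama_tokenizer(input_text, add_special_tokens=True)["input_ids"]
    mistral_tokens = mistral_tokenizer(input_text, add_special_tokens=True)["input_ids"]
    gpt2_tokens = gpt2_tokenizer(input_text, add_special_tokens=True)["input_ids"]
    gpt_neox_tokens = gpt_neox_tokenizer(input_text, add_special_tokens=True)["input_ids"]
    # Report the token count per tokenizer (output format is an assumption)
    return (
        f"LLaMA: {len(llama_tokens)} tokens\n"
        f"Mistral: {len(mistral_tokens)} tokens\n"
        f"GPT-2: {len(gpt2_tokens)} tokens\n"
        f"GPT-NeoX: {len(gpt_neox_tokens)} tokens"
    )


# Assumed Gradio wiring for the Space
demo = gr.Interface(fn=tokenize, inputs="text", outputs="text")
demo.launch()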