Commit 8df9c70 (parent: 4bd5005) by RonanMcGovern

Upload processor
preprocessor_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "do_convert_rgb": true,
-  "do_image_splitting": false,
+  "do_image_splitting": true,
   "do_normalize": true,
   "do_pad": true,
   "do_rescale": true,
tokenizer.json CHANGED
@@ -1,7 +1,14 @@
 {
   "version": "1.0",
   "truncation": null,
-  "padding": null,
+  "padding": {
+    "strategy": "BatchLongest",
+    "direction": "Left",
+    "pad_to_multiple_of": null,
+    "pad_id": 0,
+    "pad_type_id": 0,
+    "pad_token": "<unk>"
+  },
   "added_tokens": [
     {
       "id": 0,
tokenizer_config.json CHANGED
@@ -58,7 +58,7 @@
   ],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": false,
-  "do_image_splitting": false,
+  "do_image_splitting": true,
   "eos_token": "</s>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<unk>",