princepride committed
Upload 11 files

- config.json +1 -2
- model-00001-of-00003.safetensors +3 -0
- model-00002-of-00003.safetensors +3 -0
- model-00003-of-00003.safetensors +3 -0
- model.safetensors.index.json +0 -0
- preprocessor_config.json +45 -0
config.json CHANGED
@@ -1,5 +1,4 @@
 {
-  "_name_or_path": "llava_model\\model001",
   "architectures": [
     "LlavaForConditionalGeneration"
   ],
@@ -32,7 +31,7 @@
     "torch_dtype": "bfloat16",
     "vocab_size": 256000
   },
-  "torch_dtype": "
+  "torch_dtype": "float32",
   "transformers_version": "4.39.3",
   "vision_config": {
     "dropout": 0.0,
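The substantive change in config.json is the top-level "torch_dtype", now set to "float32" (the removed value is truncated in this view), while the nested text-model dtype stays "bfloat16"; the Windows-style "_name_or_path" entry is also dropped. A minimal loading sketch that matches the new dtype; the repo id is a placeholder, since the diff does not name the repository:

import torch
from transformers import LlavaForConditionalGeneration

# "user/llava-checkpoint" is a placeholder repo id; a local directory holding
# this config.json and the shards below works the same way.
model = LlavaForConditionalGeneration.from_pretrained(
    "user/llava-checkpoint",
    torch_dtype=torch.float32,  # matches the new top-level "torch_dtype"
)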
model-00001-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:765cb9784a20654e7a014f5903c078b92d8464a712f0035fbc464f92337f7ef4
+size 4963861576
model-00002-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d0a7b7c9ddacc4175c1dfa41e3e5393063f7ef978a7a5023aa5a70fc212c130a
+size 4999820616
model-00003-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d889448fcc54e169897289b232e70fb7ee5300a57a5e3e66fa2a43f4eca35d9b
+size 1300294808
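All three shards are stored as Git LFS pointers: a three-line stub recording the spec version, the SHA-256 of the payload, and its size in bytes. A sketch (not part of the commit) for checking a downloaded shard against the oid and size recorded above:

import hashlib
import os

def verify_shard(path: str, expected_oid: str, expected_size: int) -> bool:
    # Cheap size check first, then a streaming SHA-256 over the file.
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

print(verify_shard(
    "model-00001-of-00003.safetensors",
    "765cb9784a20654e7a014f5903c078b92d8464a712f0035fbc464f92337f7ef4",
    4963861576,
))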
model.safetensors.index.json CHANGED
The diff for this file is too large to render; see the raw diff.
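Although the diff is unrendered, a sharded safetensors index follows a standard shape: a "metadata" object with the total tensor size and a "weight_map" from parameter names to shard files. A sketch for inspecting it once downloaded (the path assumes the repo's root directory):

import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])       # total bytes across all tensors
for name, shard in list(index["weight_map"].items())[:5]:
    print(f"{name} -> {shard}")              # which of the three shards holds each weight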
preprocessor_config.json ADDED
@@ -0,0 +1,45 @@
+{
+  "_valid_processor_keys": [
+    "images",
+    "do_resize",
+    "size",
+    "resample",
+    "do_center_crop",
+    "crop_size",
+    "do_rescale",
+    "rescale_factor",
+    "do_normalize",
+    "image_mean",
+    "image_std",
+    "do_convert_rgb",
+    "return_tensors",
+    "data_format",
+    "input_data_format"
+  ],
+  "crop_size": {
+    "height": 336,
+    "width": 336
+  },
+  "do_center_crop": true,
+  "do_convert_rgb": true,
+  "do_normalize": true,
+  "do_rescale": true,
+  "do_resize": true,
+  "image_mean": [
+    0.48145466,
+    0.4578275,
+    0.40821073
+  ],
+  "image_processor_type": "CLIPImageProcessor",
+  "image_std": [
+    0.26862954,
+    0.26130258,
+    0.27577711
+  ],
+  "processor_class": "CLIPProcessor",
+  "resample": 3,
+  "rescale_factor": 0.00392156862745098,
+  "size": {
+    "shortest_edge": 336
+  }
+}
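This preprocessor config pins down a standard CLIP image pipeline: resize the shortest edge to 336 (resample 3 is bicubic), center-crop to 336x336, rescale pixel values by 1/255 (0.00392156862745098), and normalize with the CLIP mean/std listed above. A short usage sketch; "example.jpg" and the current-directory path are placeholders:

from PIL import Image
from transformers import CLIPImageProcessor

# Reads the preprocessor_config.json added in this commit from the current directory.
processor = CLIPImageProcessor.from_pretrained(".")

image = Image.open("example.jpg").convert("RGB")
inputs = processor(images=image, return_tensors="pt")
print(inputs["pixel_values"].shape)  # torch.Size([1, 3, 336, 336])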