SanghyukChun committed
Commit: a77ea36
Parent(s): 4d4cffe

Update README.md
README.md CHANGED
@@ -11,6 +11,8 @@ Zero-shot ImageNet-1k top-1 accuracy: 34.642% (slightly better than the paper sc
 
 - Paper: https://openreview.net/forum?id=ft1mr3WlGM
 - GitHub: https://github.com/naver-ai/pcmepp
+- Check out a better version with ImageNet-1k top-1 accuracy 41.812% (mean-only ZS classification) at [SanghyukChun/PCMEPP-ViT-B-16-CC3M-12M-RedCaps-256M](https://huggingface.co/SanghyukChun/PCMEPP-ViT-B-16-CC3M-12M-RedCaps-256M)
+
 
 ```python
 import requests
@@ -24,7 +26,10 @@ from hf_models import HfPCMEPPModel, tokenize
 
 
 processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch16")
+# IN-top1: 34.64%
 model = HfPCMEPPModel.from_pretrained("SanghyukChun/PCMEPP-ViT-B-16-CC3M-12M-RedCaps")
+# IN-top1: 41.81%
+# model = HfPCMEPPModel.from_pretrained("SanghyukChun/PCMEPP-ViT-B-16-CC3M-12M-RedCaps-256M")
 
 
 url = "http://images.cocodataset.org/val2017/000000039769.jpg"
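The diff above shows only the lines of the README's usage snippet that this commit touches. For context, here is a minimal sketch of how the full example plausibly runs end to end. It assumes the `hf_models` module from the naver-ai/pcmepp repo is on the path; the forward signature `model(images=..., texts=...)` and the output keys `image_features`, `text_features`, `image_stds`, and `text_stds` are assumptions based on the PCME++ codebase, not something this diff confirms.

```python
import requests
from PIL import Image

import torch
from transformers import CLIPProcessor
from hf_models import HfPCMEPPModel, tokenize  # from the naver-ai/pcmepp repo

processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch16")
model = HfPCMEPPModel.from_pretrained("SanghyukChun/PCMEPP-ViT-B-16-CC3M-12M-RedCaps")

# Fetch a test image and preprocess it with the CLIP processor.
url = "http://images.cocodataset.org/val2017/000000039769.jpg"
image = Image.open(requests.get(url, stream=True).raw)
inputs = processor(images=image, return_tensors="pt", padding=True)

# Tokenize candidate captions with the repo's tokenizer.
texts = tokenize(["a photo of a cat", "a photo of a dog"])

# Assumed forward signature and output keys, following the PCME++ codebase:
# the model returns mean embeddings plus log-variance ("std") heads that
# parameterize each probabilistic embedding.
with torch.no_grad():
    outputs = model(images=inputs["pixel_values"], texts=texts)

# Mean-only similarity; the "mean-only ZS classification" numbers quoted in
# the README use the mean embeddings like this, ignoring the variance heads.
logits = outputs["image_features"] @ outputs["text_features"].T
print("similarity:", logits)

# Per-input uncertainty: larger values indicate a more ambiguous image/caption.
print("image uncertainty:", torch.exp(outputs["image_stds"]).mean(dim=-1))
print("text uncertainty:", torch.exp(outputs["text_stds"]).mean(dim=-1))
```

Swapping in the commented-out 256M checkpoint is the one-line change the new comments in the diff point to; the rest of the snippet stays the same.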