Add files using upload-large-folder tool
- pytorch-image-models/hfdocs/source/models/tf-efficientnet-condconv.mdx +256 -0
- pytorch-image-models/hfdocs/source/models/tf-inception-v3.mdx +154 -0
- pytorch-image-models/hfdocs/source/models/wide-resnet.mdx +169 -0
- pytorch-image-models/hfdocs/source/reference/models.mdx +5 -0
- pytorch-image-models/results/README.md +67 -0
- pytorch-image-models/results/results-imagenet-r.csv +0 -0
- pytorch-image-models/results/results-imagenet-real.csv +0 -0
- pytorch-image-models/results/results-imagenet.csv +0 -0
- pytorch-image-models/results/results-imagenetv2-matched-frequency.csv +0 -0
- pytorch-image-models/results/results-sketch.csv +0 -0
- pytorch-image-models/tests/test_layers.py +121 -0
- pytorch-image-models/tests/test_models.py +710 -0
- pytorch-image-models/timm/__init__.py +4 -0
- pytorch-image-models/timm/data/__init__.py +15 -0
- pytorch-image-models/timm/data/__pycache__/auto_augment.cpython-39.pyc +0 -0
- pytorch-image-models/timm/data/__pycache__/config.cpython-39.pyc +0 -0
- pytorch-image-models/timm/data/__pycache__/dataset.cpython-39.pyc +0 -0
- pytorch-image-models/timm/data/__pycache__/dataset_factory.cpython-39.pyc +0 -0
- pytorch-image-models/timm/data/__pycache__/dataset_info.cpython-39.pyc +0 -0
- pytorch-image-models/timm/data/__pycache__/distributed_sampler.cpython-39.pyc +0 -0
- pytorch-image-models/timm/data/__pycache__/imagenet_info.cpython-39.pyc +0 -0
- pytorch-image-models/timm/data/__pycache__/mixup.cpython-39.pyc +0 -0
- pytorch-image-models/timm/data/__pycache__/random_erasing.cpython-39.pyc +0 -0
- pytorch-image-models/timm/data/__pycache__/real_labels.cpython-39.pyc +0 -0
- pytorch-image-models/timm/data/__pycache__/transforms.cpython-39.pyc +0 -0
- pytorch-image-models/timm/data/__pycache__/transforms_factory.cpython-39.pyc +0 -0
- pytorch-image-models/timm/data/_info/imagenet12k_synsets.txt +0 -0
- pytorch-image-models/timm/data/_info/imagenet21k_goog_synsets.txt +0 -0
- pytorch-image-models/timm/data/_info/imagenet22k_ms_synsets.txt +0 -0
- pytorch-image-models/timm/data/_info/imagenet22k_ms_to_12k_indices.txt +11821 -0
- pytorch-image-models/timm/data/_info/imagenet22k_ms_to_22k_indices.txt +0 -0
- pytorch-image-models/timm/data/_info/imagenet22k_synsets.txt +0 -0
- pytorch-image-models/timm/data/_info/imagenet_a_synsets.txt +200 -0
- pytorch-image-models/timm/data/_info/imagenet_r_indices.txt +200 -0
- pytorch-image-models/timm/data/_info/imagenet_r_synsets.txt +200 -0
- pytorch-image-models/timm/data/_info/imagenet_real_labels.json +0 -0
- pytorch-image-models/timm/data/_info/imagenet_synset_to_definition.txt +0 -0
- pytorch-image-models/timm/data/_info/imagenet_synset_to_lemma.txt +0 -0
- pytorch-image-models/timm/data/_info/imagenet_synsets.txt +1000 -0
- pytorch-image-models/timm/data/_info/mini_imagenet_indices.txt +100 -0
- pytorch-image-models/timm/data/_info/mini_imagenet_synsets.txt +100 -0
- pytorch-image-models/timm/data/auto_augment.py +1000 -0
- pytorch-image-models/timm/data/config.py +129 -0
- pytorch-image-models/timm/data/constants.py +10 -0
- pytorch-image-models/timm/data/dataset.py +204 -0
- pytorch-image-models/timm/data/dataset_factory.py +229 -0
- pytorch-image-models/timm/data/dataset_info.py +73 -0
- pytorch-image-models/timm/data/distributed_sampler.py +135 -0
- pytorch-image-models/timm/data/imagenet_info.py +95 -0
- pytorch-image-models/timm/data/loader.py +409 -0
pytorch-image-models/hfdocs/source/models/tf-efficientnet-condconv.mdx
ADDED
@@ -0,0 +1,256 @@
# (Tensorflow) EfficientNet CondConv

**EfficientNet** is a convolutional neural network architecture and scaling method that uniformly scales all dimensions of depth/width/resolution using a *compound coefficient*. Unlike conventional practice, which scales these factors arbitrarily, the EfficientNet scaling method uniformly scales network width, depth, and resolution with a set of fixed scaling coefficients. For example, if we want to use \\( 2^N \\) times more computational resources, then we can simply increase the network depth by \\( \alpha ^ N \\), width by \\( \beta ^ N \\), and image size by \\( \gamma ^ N \\), where \\( \alpha, \beta, \gamma \\) are constant coefficients determined by a small grid search on the original small model. EfficientNet uses a compound coefficient \\( \phi \\) to uniformly scale network width, depth, and resolution in a principled way.
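
For a concrete sense of the scaling arithmetic: the EfficientNet paper's grid search found \\( \alpha = 1.2 \\), \\( \beta = 1.1 \\), \\( \gamma = 1.15 \\) under the constraint \\( \alpha \cdot \beta^2 \cdot \gamma^2 \approx 2 \\), so setting \\( \phi = 1 \\) yields a network roughly 1.2x deeper and 1.1x wider than B0, run on images 1.15x larger, for approximately double the FLOPs.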

The compound scaling method is justified by the intuition that if the input image is bigger, then the network needs more layers to increase the receptive field and more channels to capture more fine-grained patterns on the bigger image.

The base EfficientNet-B0 network is based on the inverted bottleneck residual blocks of [MobileNetV2](https://paperswithcode.com/method/mobilenetv2), in addition to squeeze-and-excitation blocks.

This collection of models amends EfficientNet by adding [CondConv](https://paperswithcode.com/method/condconv) convolutions.
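
Conceptually, a CondConv layer computes per-example routing weights and uses them to mix several expert kernels into a single kernel before convolving. Below is a minimal sketch of that idea (an illustration only, not timm's actual implementation; the `CondConv2dSketch` name and the default expert count are made up for the example):

```py
import torch
import torch.nn as nn
import torch.nn.functional as F

class CondConv2dSketch(nn.Module):
    """Minimal CondConv sketch: a per-example mixture of expert kernels."""
    def __init__(self, in_ch, out_ch, kernel_size=3, num_experts=4):
        super().__init__()
        self.in_ch, self.out_ch, self.k = in_ch, out_ch, kernel_size
        # one conv kernel per expert: (num_experts, out_ch, in_ch, k, k)
        self.weight = nn.Parameter(
            0.01 * torch.randn(num_experts, out_ch, in_ch, kernel_size, kernel_size))
        self.routing = nn.Linear(in_ch, num_experts)

    def forward(self, x):
        b, c, h, w = x.shape
        # routing: global average pool -> linear -> sigmoid, one weight per expert
        r = torch.sigmoid(self.routing(x.mean(dim=(2, 3))))      # (B, K)
        # mix the expert kernels into a single kernel per example
        w_mix = torch.einsum('bk,koihw->boihw', r, self.weight)  # (B, O, I, k, k)
        # apply B different kernels in one call via a grouped convolution
        out = F.conv2d(
            x.reshape(1, b * c, h, w),
            w_mix.reshape(b * self.out_ch, self.in_ch, self.k, self.k),
            padding=self.k // 2, groups=b)
        return out.reshape(b, self.out_ch, h, w)

# e.g. CondConv2dSketch(32, 64)(torch.randn(2, 32, 56, 56)) -> shape (2, 64, 56, 56)
```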

The weights from this model were ported from [Tensorflow/TPU](https://github.com/tensorflow/tpu).

## How do I use this model on an image?

To load a pretrained model:

```py
>>> import timm
>>> model = timm.create_model('tf_efficientnet_cc_b0_4e', pretrained=True)
>>> model.eval()
```

To load and preprocess the image:

```py
>>> import urllib
>>> from PIL import Image
>>> from timm.data import resolve_data_config
>>> from timm.data.transforms_factory import create_transform

>>> config = resolve_data_config({}, model=model)
>>> transform = create_transform(**config)

>>> url, filename = ("https://github.com/pytorch/hub/raw/master/images/dog.jpg", "dog.jpg")
>>> urllib.request.urlretrieve(url, filename)
>>> img = Image.open(filename).convert('RGB')
>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension
```

To get the model predictions:

```py
>>> import torch
>>> with torch.no_grad():
...     out = model(tensor)
>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)
>>> print(probabilities.shape)
>>> # prints: torch.Size([1000])
```

To get the top-5 predictions class names:

```py
>>> # Get imagenet class mappings
>>> url, filename = ("https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt", "imagenet_classes.txt")
>>> urllib.request.urlretrieve(url, filename)
>>> with open("imagenet_classes.txt", "r") as f:
...     categories = [s.strip() for s in f.readlines()]

>>> # Print top categories per image
>>> top5_prob, top5_catid = torch.topk(probabilities, 5)
>>> for i in range(top5_prob.size(0)):
...     print(categories[top5_catid[i]], top5_prob[i].item())
>>> # prints class names and probabilities like:
>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]
```

Replace the model name with the variant you want to use, e.g. `tf_efficientnet_cc_b0_4e`. You can find the IDs in the model summaries at the top of this page.

To extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.

## How do I finetune this model?

You can finetune any of the pre-trained models just by changing the classifier (the last layer).

```py
>>> model = timm.create_model('tf_efficientnet_cc_b0_4e', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)
```

To finetune on your own dataset, you have to write a training loop or adapt [timm's training script](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.

## How do I train this model?

You can follow the [timm recipe scripts](../scripts) for training a new model afresh.

## Citation

```BibTeX
@article{DBLP:journals/corr/abs-1904-04971,
  author    = {Brandon Yang and
               Gabriel Bender and
               Quoc V. Le and
               Jiquan Ngiam},
  title     = {Soft Conditional Computation},
  journal   = {CoRR},
  volume    = {abs/1904.04971},
  year      = {2019},
  url       = {http://arxiv.org/abs/1904.04971},
  archivePrefix = {arXiv},
  eprint    = {1904.04971},
  timestamp = {Thu, 25 Apr 2019 13:55:01 +0200},
  biburl    = {https://dblp.org/rec/journals/corr/abs-1904-04971.bib},
  bibsource = {dblp computer science bibliography, https://dblp.org}
}
```

<!--
Type: model-index
Collections:
- Name: TF EfficientNet CondConv
  Paper:
    Title: 'CondConv: Conditionally Parameterized Convolutions for Efficient Inference'
    URL: https://paperswithcode.com/paper/soft-conditional-computation
Models:
- Name: tf_efficientnet_cc_b0_4e
  In Collection: TF EfficientNet CondConv
  Metadata:
    FLOPs: 224153788
    Parameters: 13310000
    File Size: 53490940
    Architecture:
    - 1x1 Convolution
    - Average Pooling
    - Batch Normalization
    - CondConv
    - Convolution
    - Dense Connections
    - Dropout
    - Inverted Residual Block
    - Squeeze-and-Excitation Block
    - Swish
    Tasks:
    - Image Classification
    Training Techniques:
    - AutoAugment
    - Label Smoothing
    - RMSProp
    - Stochastic Depth
    - Weight Decay
    Training Data:
    - ImageNet
    ID: tf_efficientnet_cc_b0_4e
    LR: 0.256
    Epochs: 350
    Crop Pct: '0.875'
    Momentum: 0.9
    Batch Size: 2048
    Image Size: '224'
    Weight Decay: 1.0e-05
    Interpolation: bicubic
    RMSProp Decay: 0.9
    Label Smoothing: 0.1
    BatchNorm Momentum: 0.99
  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1561
  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b0_4e-4362b6b2.pth
  Results:
  - Task: Image Classification
    Dataset: ImageNet
    Metrics:
      Top 1 Accuracy: 77.32%
      Top 5 Accuracy: 93.32%
- Name: tf_efficientnet_cc_b0_8e
  In Collection: TF EfficientNet CondConv
  Metadata:
    FLOPs: 224158524
    Parameters: 24010000
    File Size: 96287616
    Architecture:
    - 1x1 Convolution
    - Average Pooling
    - Batch Normalization
    - CondConv
    - Convolution
    - Dense Connections
    - Dropout
    - Inverted Residual Block
    - Squeeze-and-Excitation Block
    - Swish
    Tasks:
    - Image Classification
    Training Techniques:
    - AutoAugment
    - Label Smoothing
    - RMSProp
    - Stochastic Depth
    - Weight Decay
    Training Data:
    - ImageNet
    ID: tf_efficientnet_cc_b0_8e
    LR: 0.256
    Epochs: 350
    Crop Pct: '0.875'
    Momentum: 0.9
    Batch Size: 2048
    Image Size: '224'
    Weight Decay: 1.0e-05
    Interpolation: bicubic
    RMSProp Decay: 0.9
    Label Smoothing: 0.1
    BatchNorm Momentum: 0.99
  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1572
  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b0_8e-66184a25.pth
  Results:
  - Task: Image Classification
    Dataset: ImageNet
    Metrics:
      Top 1 Accuracy: 77.91%
      Top 5 Accuracy: 93.65%
- Name: tf_efficientnet_cc_b1_8e
  In Collection: TF EfficientNet CondConv
  Metadata:
    FLOPs: 370427824
    Parameters: 39720000
    File Size: 159206198
    Architecture:
    - 1x1 Convolution
    - Average Pooling
    - Batch Normalization
    - CondConv
    - Convolution
    - Dense Connections
    - Dropout
    - Inverted Residual Block
    - Squeeze-and-Excitation Block
    - Swish
    Tasks:
    - Image Classification
    Training Techniques:
    - AutoAugment
    - Label Smoothing
    - RMSProp
    - Stochastic Depth
    - Weight Decay
    Training Data:
    - ImageNet
    ID: tf_efficientnet_cc_b1_8e
    LR: 0.256
    Epochs: 350
    Crop Pct: '0.882'
    Momentum: 0.9
    Batch Size: 2048
    Image Size: '240'
    Weight Decay: 1.0e-05
    Interpolation: bicubic
    RMSProp Decay: 0.9
    Label Smoothing: 0.1
    BatchNorm Momentum: 0.99
  Code: https://github.com/rwightman/pytorch-image-models/blob/9a25fdf3ad0414b4d66da443fe60ae0aa14edc84/timm/models/efficientnet.py#L1584
  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_efficientnet_cc_b1_8e-f7c79ae1.pth
  Results:
  - Task: Image Classification
    Dataset: ImageNet
    Metrics:
      Top 1 Accuracy: 79.33%
      Top 5 Accuracy: 94.37%
-->
pytorch-image-models/hfdocs/source/models/tf-inception-v3.mdx
ADDED
@@ -0,0 +1,154 @@
# (Tensorflow) Inception v3

**Inception v3** is a convolutional neural network architecture from the Inception family that makes several improvements, including using [Label Smoothing](https://paperswithcode.com/method/label-smoothing), factorized 7 x 7 convolutions, and the use of an [auxiliary classifier](https://paperswithcode.com/method/auxiliary-classifier) to propagate label information lower down the network (along with the use of batch normalization for layers in the sidehead). The key building block is an [Inception Module](https://paperswithcode.com/method/inception-v3-module).
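
Since label smoothing is one of this model's training ingredients (the metadata below lists a smoothing factor of 0.1), here is a minimal sketch of a uniformly smoothed cross-entropy loss; it illustrates the technique only and is not timm's loss implementation:

```py
import torch
import torch.nn.functional as F

def smoothed_cross_entropy(logits, target, eps=0.1):
    # put mass (1 - eps) on the true class, spread eps uniformly over all classes
    logp = F.log_softmax(logits, dim=-1)
    nll = -logp.gather(1, target.unsqueeze(1)).squeeze(1)  # per-example NLL
    uniform = -logp.mean(dim=-1)                           # NLL under a uniform target
    return ((1.0 - eps) * nll + eps * uniform).mean()

loss = smoothed_cross_entropy(torch.randn(8, 1000), torch.randint(0, 1000, (8,)))
```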

The weights from this model were ported from [Tensorflow/Models](https://github.com/tensorflow/models).

## How do I use this model on an image?

To load a pretrained model:

```py
>>> import timm
>>> model = timm.create_model('tf_inception_v3', pretrained=True)
>>> model.eval()
```

To load and preprocess the image:

```py
>>> import urllib
>>> from PIL import Image
>>> from timm.data import resolve_data_config
>>> from timm.data.transforms_factory import create_transform

>>> config = resolve_data_config({}, model=model)
>>> transform = create_transform(**config)

>>> url, filename = ("https://github.com/pytorch/hub/raw/master/images/dog.jpg", "dog.jpg")
>>> urllib.request.urlretrieve(url, filename)
>>> img = Image.open(filename).convert('RGB')
>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension
```

To get the model predictions:

```py
>>> import torch
>>> with torch.no_grad():
...     out = model(tensor)
>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)
>>> print(probabilities.shape)
>>> # prints: torch.Size([1000])
```

To get the top-5 predictions class names:

```py
>>> # Get imagenet class mappings
>>> url, filename = ("https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt", "imagenet_classes.txt")
>>> urllib.request.urlretrieve(url, filename)
>>> with open("imagenet_classes.txt", "r") as f:
...     categories = [s.strip() for s in f.readlines()]

>>> # Print top categories per image
>>> top5_prob, top5_catid = torch.topk(probabilities, 5)
>>> for i in range(top5_prob.size(0)):
...     print(categories[top5_catid[i]], top5_prob[i].item())
>>> # prints class names and probabilities like:
>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]
```

Replace the model name with the variant you want to use, e.g. `tf_inception_v3`. You can find the IDs in the model summaries at the top of this page.

To extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.

## How do I finetune this model?

You can finetune any of the pre-trained models just by changing the classifier (the last layer).

```py
>>> model = timm.create_model('tf_inception_v3', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)
```

To finetune on your own dataset, you have to write a training loop or adapt [timm's training script](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.

## How do I train this model?

You can follow the [timm recipe scripts](../scripts) for training a new model afresh.

## Citation

```BibTeX
@article{DBLP:journals/corr/SzegedyVISW15,
  author    = {Christian Szegedy and
               Vincent Vanhoucke and
               Sergey Ioffe and
               Jonathon Shlens and
               Zbigniew Wojna},
  title     = {Rethinking the Inception Architecture for Computer Vision},
  journal   = {CoRR},
  volume    = {abs/1512.00567},
  year      = {2015},
  url       = {http://arxiv.org/abs/1512.00567},
  archivePrefix = {arXiv},
  eprint    = {1512.00567},
  timestamp = {Mon, 13 Aug 2018 16:49:07 +0200},
  biburl    = {https://dblp.org/rec/journals/corr/SzegedyVISW15.bib},
  bibsource = {dblp computer science bibliography, https://dblp.org}
}
```

<!--
Type: model-index
Collections:
- Name: TF Inception v3
  Paper:
    Title: Rethinking the Inception Architecture for Computer Vision
    URL: https://paperswithcode.com/paper/rethinking-the-inception-architecture-for
Models:
- Name: tf_inception_v3
  In Collection: TF Inception v3
  Metadata:
    FLOPs: 7352418880
    Parameters: 23830000
    File Size: 95549439
    Architecture:
    - 1x1 Convolution
    - Auxiliary Classifier
    - Average Pooling
    - Batch Normalization
    - Convolution
    - Dense Connections
    - Dropout
    - Inception-v3 Module
    - Max Pooling
    - ReLU
    - Softmax
    Tasks:
    - Image Classification
    Training Techniques:
    - Gradient Clipping
    - Label Smoothing
    - RMSProp
    - Weight Decay
    Training Data:
    - ImageNet
    Training Resources: 50x NVIDIA Kepler GPUs
    ID: tf_inception_v3
    LR: 0.045
    Dropout: 0.2
    Crop Pct: '0.875'
    Momentum: 0.9
    Image Size: '299'
    Interpolation: bicubic
  Code: https://github.com/rwightman/pytorch-image-models/blob/d8e69206be253892b2956341fea09fdebfaae4e3/timm/models/inception_v3.py#L449
  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/tf_inception_v3-e0069de4.pth
  Results:
  - Task: Image Classification
    Dataset: ImageNet
    Metrics:
      Top 1 Accuracy: 77.87%
      Top 5 Accuracy: 93.65%
-->
pytorch-image-models/hfdocs/source/models/wide-resnet.mdx
ADDED
@@ -0,0 +1,169 @@
# Wide ResNet

**Wide Residual Networks** are a variant on [ResNets](https://paperswithcode.com/method/resnet) where we decrease depth and increase the width of residual networks. This is achieved through the use of [wide residual blocks](https://paperswithcode.com/method/wide-residual-block).
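
As a sketch of the core idea (assuming the CIFAR-style pre-activation basic block from the paper; the ImageNet `wide_resnet50_2` / `wide_resnet101_2` variants listed below instead double the 3x3 channels inside standard ResNet bottlenecks), widening simply multiplies a block's channel count by a factor `k`:

```py
import torch.nn as nn

class WideBasicBlock(nn.Module):
    """Pre-activation basic block widened by factor k (a sketch, not timm's code)."""
    def __init__(self, in_ch, planes, k=2, stride=1):
        super().__init__()
        out_ch = planes * k  # widening: k times more channels per block
        self.bn1 = nn.BatchNorm2d(in_ch)
        self.conv1 = nn.Conv2d(in_ch, out_ch, 3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(out_ch)
        self.conv2 = nn.Conv2d(out_ch, out_ch, 3, padding=1, bias=False)
        self.act = nn.ReLU(inplace=True)
        self.shortcut = (
            nn.Conv2d(in_ch, out_ch, 1, stride=stride, bias=False)
            if stride != 1 or in_ch != out_ch else nn.Identity())

    def forward(self, x):
        out = self.conv1(self.act(self.bn1(x)))
        out = self.conv2(self.act(self.bn2(out)))
        return out + self.shortcut(x)
```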

## How do I use this model on an image?

To load a pretrained model:

```py
>>> import timm
>>> model = timm.create_model('wide_resnet101_2', pretrained=True)
>>> model.eval()
```

To load and preprocess the image:

```py
>>> import urllib
>>> from PIL import Image
>>> from timm.data import resolve_data_config
>>> from timm.data.transforms_factory import create_transform

>>> config = resolve_data_config({}, model=model)
>>> transform = create_transform(**config)

>>> url, filename = ("https://github.com/pytorch/hub/raw/master/images/dog.jpg", "dog.jpg")
>>> urllib.request.urlretrieve(url, filename)
>>> img = Image.open(filename).convert('RGB')
>>> tensor = transform(img).unsqueeze(0) # transform and add batch dimension
```

To get the model predictions:

```py
>>> import torch
>>> with torch.no_grad():
...     out = model(tensor)
>>> probabilities = torch.nn.functional.softmax(out[0], dim=0)
>>> print(probabilities.shape)
>>> # prints: torch.Size([1000])
```

To get the top-5 predictions class names:

```py
>>> # Get imagenet class mappings
>>> url, filename = ("https://raw.githubusercontent.com/pytorch/hub/master/imagenet_classes.txt", "imagenet_classes.txt")
>>> urllib.request.urlretrieve(url, filename)
>>> with open("imagenet_classes.txt", "r") as f:
...     categories = [s.strip() for s in f.readlines()]

>>> # Print top categories per image
>>> top5_prob, top5_catid = torch.topk(probabilities, 5)
>>> for i in range(top5_prob.size(0)):
...     print(categories[top5_catid[i]], top5_prob[i].item())
>>> # prints class names and probabilities like:
>>> # [('Samoyed', 0.6425196528434753), ('Pomeranian', 0.04062102362513542), ('keeshond', 0.03186424449086189), ('white wolf', 0.01739676296710968), ('Eskimo dog', 0.011717947199940681)]
```

Replace the model name with the variant you want to use, e.g. `wide_resnet101_2`. You can find the IDs in the model summaries at the top of this page.

To extract image features with this model, follow the [timm feature extraction examples](../feature_extraction), just change the name of the model you want to use.

## How do I finetune this model?

You can finetune any of the pre-trained models just by changing the classifier (the last layer).

```py
>>> model = timm.create_model('wide_resnet101_2', pretrained=True, num_classes=NUM_FINETUNE_CLASSES)
```

To finetune on your own dataset, you have to write a training loop or adapt [timm's training script](https://github.com/rwightman/pytorch-image-models/blob/master/train.py) to use your dataset.

## How do I train this model?

You can follow the [timm recipe scripts](../scripts) for training a new model afresh.

## Citation

```BibTeX
@article{DBLP:journals/corr/ZagoruykoK16,
  author    = {Sergey Zagoruyko and
               Nikos Komodakis},
  title     = {Wide Residual Networks},
  journal   = {CoRR},
  volume    = {abs/1605.07146},
  year      = {2016},
  url       = {http://arxiv.org/abs/1605.07146},
  archivePrefix = {arXiv},
  eprint    = {1605.07146},
  timestamp = {Mon, 13 Aug 2018 16:46:42 +0200},
  biburl    = {https://dblp.org/rec/journals/corr/ZagoruykoK16.bib},
  bibsource = {dblp computer science bibliography, https://dblp.org}
}
```

<!--
Type: model-index
Collections:
- Name: Wide ResNet
  Paper:
    Title: Wide Residual Networks
    URL: https://paperswithcode.com/paper/wide-residual-networks
Models:
- Name: wide_resnet101_2
  In Collection: Wide ResNet
  Metadata:
    FLOPs: 29304929280
    Parameters: 126890000
    File Size: 254695146
    Architecture:
    - 1x1 Convolution
    - Batch Normalization
    - Convolution
    - Global Average Pooling
    - Max Pooling
    - ReLU
    - Residual Connection
    - Softmax
    - Wide Residual Block
    Tasks:
    - Image Classification
    Training Data:
    - ImageNet
    ID: wide_resnet101_2
    Crop Pct: '0.875'
    Image Size: '224'
    Interpolation: bilinear
  Code: https://github.com/rwightman/pytorch-image-models/blob/5f9aff395c224492e9e44248b15f44b5cc095d9c/timm/models/resnet.py#L802
  Weights: https://download.pytorch.org/models/wide_resnet101_2-32ee1156.pth
  Results:
  - Task: Image Classification
    Dataset: ImageNet
    Metrics:
      Top 1 Accuracy: 78.85%
      Top 5 Accuracy: 94.28%
- Name: wide_resnet50_2
  In Collection: Wide ResNet
  Metadata:
    FLOPs: 14688058368
    Parameters: 68880000
    File Size: 275853271
    Architecture:
    - 1x1 Convolution
    - Batch Normalization
    - Convolution
    - Global Average Pooling
    - Max Pooling
    - ReLU
    - Residual Connection
    - Softmax
    - Wide Residual Block
    Tasks:
    - Image Classification
    Training Data:
    - ImageNet
    ID: wide_resnet50_2
    Crop Pct: '0.875'
    Image Size: '224'
    Interpolation: bicubic
  Code: https://github.com/rwightman/pytorch-image-models/blob/5f9aff395c224492e9e44248b15f44b5cc095d9c/timm/models/resnet.py#L790
  Weights: https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights/wide_resnet50_racm-8234f177.pth
  Results:
  - Task: Image Classification
    Dataset: ImageNet
    Metrics:
      Top 1 Accuracy: 81.45%
      Top 5 Accuracy: 95.52%
-->
pytorch-image-models/hfdocs/source/reference/models.mdx
ADDED
@@ -0,0 +1,5 @@
# Models

[[autodoc]] timm.create_model

[[autodoc]] timm.list_models
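
As a quick orientation for these two entry points (a usage sketch; the model names come from the model pages in this repo):

```py
>>> import timm
>>> timm.list_models('tf_efficientnet_cc_*')
['tf_efficientnet_cc_b0_4e', 'tf_efficientnet_cc_b0_8e', 'tf_efficientnet_cc_b1_8e']
>>> model = timm.create_model('tf_efficientnet_cc_b0_4e', pretrained=False)
```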
pytorch-image-models/results/README.md
ADDED
@@ -0,0 +1,67 @@
# Validation and Benchmark Results

This folder contains validation and benchmark results for the models in this collection. Validation scores are currently only run for models with pretrained weights and ImageNet-1k heads; benchmark numbers are run for all.

## Datasets

There are currently results for the ImageNet validation set and 5 additional test / label sets.

The test set results include rank and top-1/top-5 differences from clean validation. For the "Real Labels", ImageNetV2, and Sketch test sets, the differences were calculated against the full 1000 class ImageNet-1k validation set. For both the Adversarial and Rendition sets, the differences were calculated against 'clean' runs on the ImageNet-1k validation set with the same 200 classes used in each test set respectively.
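
These differences can be recomputed from the CSVs in this folder. A minimal sketch with pandas (the `model` and `top1` column names are assumptions about the CSV layout):

```py
import pandas as pd

clean = pd.read_csv('results-imagenet.csv')
sketch = pd.read_csv('results-sketch.csv')

# join on model name, then compare test-set top-1 against clean validation top-1
df = sketch.merge(clean, on='model', suffixes=('_sketch', '_clean'))
df['top1_diff'] = df['top1_sketch'] - df['top1_clean']
print(df.sort_values('top1_diff', ascending=False).head())
```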

### ImageNet Validation - [`results-imagenet.csv`](results-imagenet.csv)

The standard 50,000 image ImageNet-1k validation set. Model selection during training utilizes this validation set, so it is not a true test set. Question: Does anyone have the official ImageNet-1k test set classification labels now that challenges are done?

* Source: http://image-net.org/challenges/LSVRC/2012/index
* Paper: "ImageNet Large Scale Visual Recognition Challenge" - https://arxiv.org/abs/1409.0575

### ImageNet-"Real Labels" - [`results-imagenet-real.csv`](results-imagenet-real.csv)

The usual ImageNet-1k validation set with a fresh new set of labels intended to improve on mistakes in the original annotation process.

* Source: https://github.com/google-research/reassessed-imagenet
* Paper: "Are we done with ImageNet?" - https://arxiv.org/abs/2006.07159

### ImageNetV2 Matched Frequency - [`results-imagenetv2-matched-frequency.csv`](results-imagenetv2-matched-frequency.csv)

An ImageNet test set of 10,000 images sampled from new images roughly 10 years after the original. Care was taken to replicate the original ImageNet curation/sampling process.

* Source: https://github.com/modestyachts/ImageNetV2
* Paper: "Do ImageNet Classifiers Generalize to ImageNet?" - https://arxiv.org/abs/1902.10811

### ImageNet-Sketch - [`results-sketch.csv`](results-sketch.csv)

50,000 non-photographic (or photos of such) images (sketches, doodles, mostly monochromatic) covering all 1000 ImageNet classes.

* Source: https://github.com/HaohanWang/ImageNet-Sketch
* Paper: "Learning Robust Global Representations by Penalizing Local Predictive Power" - https://arxiv.org/abs/1905.13549

### ImageNet-Adversarial - [`results-imagenet-a.csv`](results-imagenet-a.csv)

A collection of 7500 images covering 200 of the 1000 ImageNet classes. Images are naturally occurring adversarial examples that confuse typical ImageNet classifiers. This is a challenging dataset; your typical ResNet-50 will score 0% top-1.

For clean validation with the same 200 classes, see [`results-imagenet-a-clean.csv`](results-imagenet-a-clean.csv)

* Source: https://github.com/hendrycks/natural-adv-examples
* Paper: "Natural Adversarial Examples" - https://arxiv.org/abs/1907.07174

### ImageNet-Rendition - [`results-imagenet-r.csv`](results-imagenet-r.csv)

Renditions of 200 ImageNet classes resulting in 30,000 images for testing robustness.

For clean validation with the same 200 classes, see [`results-imagenet-r-clean.csv`](results-imagenet-r-clean.csv)

* Source: https://github.com/hendrycks/imagenet-r
* Paper: "The Many Faces of Robustness" - https://arxiv.org/abs/2006.16241

### TODO
* Explore adding a reduced version of ImageNet-C (Corruptions) and ImageNet-P (Perturbations) from https://github.com/hendrycks/robustness. The originals are huge and image size specific.

## Benchmark

CSV files with a `model_benchmark` prefix include benchmark numbers for models on various accelerators with different precision. Currently only run on an RTX 3090 w/ AMP for inference; I intend to add more in the future.

## Metadata

CSV files with a `model_metadata` prefix contain extra information about the source training, currently the pretraining dataset and technique (i.e. distillation, SSL, WSL, etc.). Eventually I'd like to have metadata about augmentation, regularization, etc., but that will be a challenge to source consistently.
pytorch-image-models/results/results-imagenet-r.csv
ADDED
The diff for this file is too large to render.

pytorch-image-models/results/results-imagenet-real.csv
ADDED
The diff for this file is too large to render.

pytorch-image-models/results/results-imagenet.csv
ADDED
The diff for this file is too large to render.

pytorch-image-models/results/results-imagenetv2-matched-frequency.csv
ADDED
The diff for this file is too large to render.

pytorch-image-models/results/results-sketch.csv
ADDED
The diff for this file is too large to render.
pytorch-image-models/tests/test_layers.py
ADDED
@@ -0,0 +1,121 @@
import torch
import torch.nn as nn

from timm.layers import create_act_layer, set_layer_config, get_act_layer, get_act_fn

import importlib
import os

torch_backend = os.environ.get('TORCH_BACKEND')
if torch_backend is not None:
    importlib.import_module(torch_backend)
torch_device = os.environ.get('TORCH_DEVICE', 'cpu')


class MLP(nn.Module):
    def __init__(self, act_layer="relu", inplace=True):
        super(MLP, self).__init__()
        self.fc1 = nn.Linear(1000, 100)
        self.act = create_act_layer(act_layer, inplace=inplace)
        self.fc2 = nn.Linear(100, 10)

    def forward(self, x):
        x = self.fc1(x)
        x = self.act(x)
        x = self.fc2(x)
        return x


def _run_act_layer_grad(act_type, inplace=True):
    x = torch.rand(10, 1000) * 10
    m = MLP(act_layer=act_type, inplace=inplace)

    def _run(x, act_layer=''):
        if act_layer:
            # replace act layer if set
            m.act = create_act_layer(act_layer, inplace=inplace)
        out = m(x)
        l = (out - 0).pow(2).sum()
        return l

    x = x.to(device=torch_device)
    m.to(device=torch_device)

    out_me = _run(x)

    with set_layer_config(scriptable=True):
        out_jit = _run(x, act_type)

    assert torch.isclose(out_jit, out_me)

    with set_layer_config(no_jit=True):
        out_basic = _run(x, act_type)

    assert torch.isclose(out_basic, out_jit)


def test_swish_grad():
    for _ in range(100):
        _run_act_layer_grad('swish')


def test_mish_grad():
    for _ in range(100):
        _run_act_layer_grad('mish')


def test_hard_sigmoid_grad():
    for _ in range(100):
        _run_act_layer_grad('hard_sigmoid', inplace=None)


def test_hard_swish_grad():
    for _ in range(100):
        _run_act_layer_grad('hard_swish')


def test_hard_mish_grad():
    for _ in range(100):
        _run_act_layer_grad('hard_mish')


def test_get_act_layer_empty_string():
    # Empty string should return None
    assert get_act_layer('') is None


def test_create_act_layer_inplace_error():
    class NoInplaceAct(nn.Module):
        def __init__(self):
            super().__init__()

        def forward(self, x):
            return x

    # Should recover when inplace arg causes TypeError
    layer = create_act_layer(NoInplaceAct, inplace=True)
    assert isinstance(layer, NoInplaceAct)


def test_create_act_layer_edge_cases():
    # Test None input
    assert create_act_layer(None) is None

    # Test TypeError handling for inplace
    class CustomAct(nn.Module):
        def __init__(self, **kwargs):
            super().__init__()

        def forward(self, x):
            return x

    result = create_act_layer(CustomAct, inplace=True)
    assert isinstance(result, CustomAct)


def test_get_act_fn_callable():
    def custom_act(x):
        return x

    assert get_act_fn(custom_act) is custom_act


def test_get_act_fn_none():
    assert get_act_fn(None) is None
    assert get_act_fn('') is None
pytorch-image-models/tests/test_models.py
ADDED
@@ -0,0 +1,710 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Run tests for all models
|
2 |
+
|
3 |
+
Tests that run on CI should have a specific marker, e.g. @pytest.mark.base. This
|
4 |
+
marker is used to parallelize the CI runs, with one runner for each marker.
|
5 |
+
|
6 |
+
If new tests are added, ensure that they use one of the existing markers
|
7 |
+
(documented in pyproject.toml > pytest > markers) or that a new marker is added
|
8 |
+
for this set of tests. If using a new marker, adjust the test matrix in
|
9 |
+
.github/workflows/tests.yml to run tests with this new marker, otherwise the
|
10 |
+
tests will be skipped on CI.
|
11 |
+
|
12 |
+
"""
|
13 |
+
|
14 |
+
import pytest
|
15 |
+
import torch
|
16 |
+
import platform
|
17 |
+
import os
|
18 |
+
import fnmatch
|
19 |
+
|
20 |
+
_IS_MAC = platform.system() == 'Darwin'
|
21 |
+
|
22 |
+
try:
|
23 |
+
from torchvision.models.feature_extraction import create_feature_extractor, get_graph_node_names, NodePathTracer
|
24 |
+
has_fx_feature_extraction = True
|
25 |
+
except ImportError:
|
26 |
+
has_fx_feature_extraction = False
|
27 |
+
|
28 |
+
import timm
|
29 |
+
from timm import list_models, list_pretrained, create_model, set_scriptable, get_pretrained_cfg_value
|
30 |
+
from timm.layers import Format, get_spatial_dim, get_channel_dim
|
31 |
+
from timm.models import get_notrace_modules, get_notrace_functions
|
32 |
+
|
33 |
+
import importlib
|
34 |
+
import os
|
35 |
+
|
36 |
+
torch_backend = os.environ.get('TORCH_BACKEND')
|
37 |
+
if torch_backend is not None:
|
38 |
+
importlib.import_module(torch_backend)
|
39 |
+
torch_device = os.environ.get('TORCH_DEVICE', 'cpu')
|
40 |
+
timeout = os.environ.get('TIMEOUT')
|
41 |
+
timeout120 = int(timeout) if timeout else 120
|
42 |
+
timeout240 = int(timeout) if timeout else 240
|
43 |
+
timeout360 = int(timeout) if timeout else 360
|
44 |
+
|
45 |
+
if hasattr(torch._C, '_jit_set_profiling_executor'):
|
46 |
+
# legacy executor is too slow to compile large models for unit tests
|
47 |
+
# no need for the fusion performance here
|
48 |
+
torch._C._jit_set_profiling_executor(True)
|
49 |
+
torch._C._jit_set_profiling_mode(False)
|
50 |
+
|
51 |
+
# models with forward_intermediates() and support for FeatureGetterNet features_only wrapper
|
52 |
+
FEAT_INTER_FILTERS = [
|
53 |
+
'vision_transformer', 'vision_transformer_sam', 'vision_transformer_hybrid', 'vision_transformer_relpos',
|
54 |
+
'beit', 'mvitv2', 'eva', 'cait', 'xcit', 'volo', 'twins', 'deit', 'swin_transformer', 'swin_transformer_v2',
|
55 |
+
'swin_transformer_v2_cr', 'maxxvit', 'efficientnet', 'mobilenetv3', 'levit', 'efficientformer', 'resnet',
|
56 |
+
'regnet', 'byobnet', 'byoanet', 'mlp_mixer', 'hiera', 'fastvit', 'hieradet_sam2'
|
57 |
+
]
|
58 |
+
|
59 |
+
# transformer / hybrid models don't support full set of spatial / feature APIs and/or have spatial output.
|
60 |
+
NON_STD_FILTERS = [
|
61 |
+
'vit_*', 'tnt_*', 'pit_*', 'coat_*', 'cait_*', '*mixer_*', 'gmlp_*', 'resmlp_*', 'twins_*',
|
62 |
+
'convit_*', 'levit*', 'visformer*', 'deit*', 'xcit_*', 'crossvit_*', 'beit*',
|
63 |
+
'poolformer_*', 'volo_*', 'sequencer2d_*', 'mvitv2*', 'gcvit*', 'efficientformer*', 'sam_hiera*',
|
64 |
+
'eva_*', 'flexivit*', 'eva02*', 'samvit_*', 'efficientvit_m*', 'tiny_vit_*', 'hiera_*', 'vitamin*', 'test_vit*',
|
65 |
+
]
|
66 |
+
NUM_NON_STD = len(NON_STD_FILTERS)
|
67 |
+
|
68 |
+
# exclude models that cause specific test failures
|
69 |
+
if 'GITHUB_ACTIONS' in os.environ:
|
70 |
+
# GitHub Linux runner is slower and hits memory limits sooner than MacOS, exclude bigger models
|
71 |
+
EXCLUDE_FILTERS = [
|
72 |
+
'*efficientnet_l2*', '*resnext101_32x48d', '*in21k', '*152x4_bitm', '*101x3_bitm', '*50x3_bitm',
|
73 |
+
'*nfnet_f3*', '*nfnet_f4*', '*nfnet_f5*', '*nfnet_f6*', '*nfnet_f7*', '*efficientnetv2_xl*',
|
74 |
+
'*resnetrs350*', '*resnetrs420*', 'xcit_large_24_p8*', '*huge*', '*giant*', '*gigantic*',
|
75 |
+
'*enormous*', 'maxvit_xlarge*', 'regnet*1280', 'regnet*2560']
|
76 |
+
NON_STD_EXCLUDE_FILTERS = ['*huge*', '*giant*', '*gigantic*', '*enormous*']
|
77 |
+
else:
|
78 |
+
EXCLUDE_FILTERS = ['*enormous*']
|
79 |
+
NON_STD_EXCLUDE_FILTERS = ['*gigantic*', '*enormous*']
|
80 |
+
|
81 |
+
EXCLUDE_JIT_FILTERS = ['hiera_*']
|
82 |
+
|
83 |
+
TARGET_FWD_SIZE = MAX_FWD_SIZE = 384
|
84 |
+
TARGET_BWD_SIZE = 128
|
85 |
+
MAX_BWD_SIZE = 320
|
86 |
+
MAX_FWD_OUT_SIZE = 448
|
87 |
+
TARGET_JIT_SIZE = 128
|
88 |
+
MAX_JIT_SIZE = 320
|
89 |
+
TARGET_FFEAT_SIZE = 96
|
90 |
+
MAX_FFEAT_SIZE = 256
|
91 |
+
TARGET_FWD_FX_SIZE = 128
|
92 |
+
MAX_FWD_FX_SIZE = 256
|
93 |
+
TARGET_BWD_FX_SIZE = 128
|
94 |
+
MAX_BWD_FX_SIZE = 224
|
95 |
+
|
96 |
+
|
97 |
+
def _get_input_size(model=None, model_name='', target=None):
|
98 |
+
if model is None:
|
99 |
+
assert model_name, "One of model or model_name must be provided"
|
100 |
+
input_size = get_pretrained_cfg_value(model_name, 'input_size')
|
101 |
+
fixed_input_size = get_pretrained_cfg_value(model_name, 'fixed_input_size')
|
102 |
+
min_input_size = get_pretrained_cfg_value(model_name, 'min_input_size')
|
103 |
+
else:
|
104 |
+
default_cfg = model.default_cfg
|
105 |
+
input_size = default_cfg['input_size']
|
106 |
+
fixed_input_size = default_cfg.get('fixed_input_size', None)
|
107 |
+
min_input_size = default_cfg.get('min_input_size', None)
|
108 |
+
assert input_size is not None
|
109 |
+
|
110 |
+
if fixed_input_size:
|
111 |
+
return input_size
|
112 |
+
|
113 |
+
if min_input_size:
|
114 |
+
if target and max(input_size) > target:
|
115 |
+
input_size = min_input_size
|
116 |
+
else:
|
117 |
+
if target and max(input_size) > target:
|
118 |
+
input_size = tuple([min(x, target) for x in input_size])
|
119 |
+
return input_size
|
120 |
+
|
121 |
+
|
122 |
+
@pytest.mark.base
|
123 |
+
@pytest.mark.timeout(timeout240)
|
124 |
+
@pytest.mark.parametrize('model_name', list_pretrained('test_*'))
|
125 |
+
@pytest.mark.parametrize('batch_size', [1])
|
126 |
+
def test_model_inference(model_name, batch_size):
|
127 |
+
"""Run a single forward pass with each model"""
|
128 |
+
from PIL import Image
|
129 |
+
from huggingface_hub import snapshot_download
|
130 |
+
import tempfile
|
131 |
+
import safetensors
|
132 |
+
|
133 |
+
model = create_model(model_name, pretrained=True)
|
134 |
+
model.eval()
|
135 |
+
pp = timm.data.create_transform(**timm.data.resolve_data_config(model=model))
|
136 |
+
|
137 |
+
with tempfile.TemporaryDirectory() as temp_dir:
|
138 |
+
snapshot_download(
|
139 |
+
repo_id='timm/' + model_name, repo_type='model', local_dir=temp_dir, allow_patterns='test/*'
|
140 |
+
)
|
141 |
+
rand_tensors = safetensors.torch.load_file(os.path.join(temp_dir, 'test', 'rand_tensors.safetensors'))
|
142 |
+
owl_tensors = safetensors.torch.load_file(os.path.join(temp_dir, 'test', 'owl_tensors.safetensors'))
|
143 |
+
test_owl = Image.open(os.path.join(temp_dir, 'test', 'test_owl.jpg'))
|
144 |
+
|
145 |
+
with torch.no_grad():
|
146 |
+
rand_output = model(rand_tensors['input'])
|
147 |
+
rand_features = model.forward_features(rand_tensors['input'])
|
148 |
+
rand_pre_logits = model.forward_head(rand_features, pre_logits=True)
|
149 |
+
assert torch.allclose(rand_output, rand_tensors['output'], rtol=1e-3, atol=1e-4), 'rand output does not match'
|
150 |
+
assert torch.allclose(rand_features, rand_tensors['features'], rtol=1e-3, atol=1e-4), 'rand features do not match'
|
151 |
+
assert torch.allclose(rand_pre_logits, rand_tensors['pre_logits'], rtol=1e-3, atol=1e-4), 'rand pre_logits do not match'
|
152 |
+
|
153 |
+
def _test_owl(owl_input, tol=(1e-3, 1e-4)):
|
154 |
+
owl_output = model(owl_input)
|
155 |
+
owl_features = model.forward_features(owl_input)
|
156 |
+
owl_pre_logits = model.forward_head(owl_features.clone(), pre_logits=True)
|
157 |
+
assert owl_output.softmax(1).argmax(1) == 24 # owl
|
158 |
+
assert torch.allclose(owl_output, owl_tensors['output'], rtol=tol[0], atol=tol[1]), 'owl output does not match'
|
159 |
+
assert torch.allclose(owl_features, owl_tensors['features'], rtol=tol[0], atol=tol[1]), 'owl output does not match'
|
160 |
+
assert torch.allclose(owl_pre_logits, owl_tensors['pre_logits'], rtol=tol[0], atol=tol[1]), 'owl output does not match'
|
161 |
+
|
162 |
+
_test_owl(owl_tensors['input']) # test with original pp owl tensor
|
163 |
+
_test_owl(pp(test_owl).unsqueeze(0), tol=(1e-1, 1e-1)) # re-process from original jpg, Pillow output can change a lot btw ver
|
164 |
+
|
165 |
+
|
166 |
+
@pytest.mark.base
|
167 |
+
@pytest.mark.timeout(timeout120)
|
168 |
+
@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS))
|
169 |
+
@pytest.mark.parametrize('batch_size', [1])
|
170 |
+
def test_model_forward(model_name, batch_size):
|
171 |
+
"""Run a single forward pass with each model"""
|
172 |
+
model = create_model(model_name, pretrained=False)
|
173 |
+
model.eval()
|
174 |
+
|
175 |
+
input_size = _get_input_size(model=model, target=TARGET_FWD_SIZE)
|
176 |
+
if max(input_size) > MAX_FWD_SIZE:
|
177 |
+
pytest.skip("Fixed input size model > limit.")
|
178 |
+
inputs = torch.randn((batch_size, *input_size))
|
179 |
+
inputs = inputs.to(torch_device)
|
180 |
+
model.to(torch_device)
|
181 |
+
outputs = model(inputs)
|
182 |
+
|
183 |
+
assert outputs.shape[0] == batch_size
|
184 |
+
assert not torch.isnan(outputs).any(), 'Output included NaNs'
|
185 |
+
|
186 |
+
|
187 |
+
@pytest.mark.base
|
188 |
+
@pytest.mark.timeout(timeout120)
|
189 |
+
@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS, name_matches_cfg=True))
|
190 |
+
@pytest.mark.parametrize('batch_size', [2])
|
191 |
+
def test_model_backward(model_name, batch_size):
|
192 |
+
"""Run a single forward pass with each model"""
|
193 |
+
input_size = _get_input_size(model_name=model_name, target=TARGET_BWD_SIZE)
|
194 |
+
if max(input_size) > MAX_BWD_SIZE:
|
195 |
+
pytest.skip("Fixed input size model > limit.")
|
196 |
+
|
197 |
+
model = create_model(model_name, pretrained=False, num_classes=42)
|
198 |
+
num_params = sum([x.numel() for x in model.parameters()])
|
199 |
+
model.train()
|
200 |
+
|
201 |
+
inputs = torch.randn((batch_size, *input_size))
|
202 |
+
inputs = inputs.to(torch_device)
|
203 |
+
model.to(torch_device)
|
204 |
+
outputs = model(inputs)
|
205 |
+
if isinstance(outputs, tuple):
|
206 |
+
outputs = torch.cat(outputs)
|
207 |
+
outputs.mean().backward()
|
208 |
+
for n, x in model.named_parameters():
|
209 |
+
assert x.grad is not None, f'No gradient for {n}'
|
210 |
+
num_grad = sum([x.grad.numel() for x in model.parameters() if x.grad is not None])
|
211 |
+
|
212 |
+
assert outputs.shape[-1] == 42
|
213 |
+
assert num_params == num_grad, 'Some parameters are missing gradients'
|
214 |
+
assert not torch.isnan(outputs).any(), 'Output included NaNs'
|
215 |
+
|
216 |
+
|
217 |
+
# models with extra conv/linear layers after pooling
|
218 |
+
EARLY_POOL_MODELS = (
|
219 |
+
timm.models.EfficientVit,
|
220 |
+
timm.models.EfficientVitLarge,
|
221 |
+
timm.models.HighPerfGpuNet,
|
222 |
+
timm.models.GhostNet,
|
223 |
+
timm.models.MetaNeXt, # InceptionNeXt
|
224 |
+
timm.models.MobileNetV3,
|
225 |
+
timm.models.RepGhostNet,
|
226 |
+
timm.models.VGG,
|
227 |
+
)
|
228 |
+
|
@pytest.mark.cfg
@pytest.mark.timeout(timeout360)
@pytest.mark.parametrize('model_name', list_models(
    exclude_filters=EXCLUDE_FILTERS + NON_STD_FILTERS, include_tags=True))
@pytest.mark.parametrize('batch_size', [1])
def test_model_default_cfgs(model_name, batch_size):
    """Run a single forward pass with each model"""
    model = create_model(model_name, pretrained=False)
    model.eval()
    model.to(torch_device)
    assert getattr(model, 'num_classes') >= 0
    assert getattr(model, 'num_features') > 0
    assert getattr(model, 'head_hidden_size') > 0
    state_dict = model.state_dict()
    cfg = model.default_cfg

    pool_size = cfg['pool_size']
    input_size = model.default_cfg['input_size']
    output_fmt = getattr(model, 'output_fmt', 'NCHW')
    spatial_axis = get_spatial_dim(output_fmt)
    assert len(spatial_axis) == 2  # TODO add 1D sequence support
    feat_axis = get_channel_dim(output_fmt)

    if all([x <= MAX_FWD_OUT_SIZE for x in input_size]) and \
            not any([fnmatch.fnmatch(model_name, x) for x in EXCLUDE_FILTERS]):
        # output sizes only checked if default res <= 448 * 448 to keep resource down
        input_size = tuple([min(x, MAX_FWD_OUT_SIZE) for x in input_size])
        input_tensor = torch.randn((batch_size, *input_size), device=torch_device)

        # test forward_features (always unpooled) & forward_head w/ pre_logits
        outputs = model.forward_features(input_tensor)
        outputs_pre = model.forward_head(outputs, pre_logits=True)
        assert outputs.shape[spatial_axis[0]] == pool_size[0], f'unpooled feature shape {outputs.shape} != config'
        assert outputs.shape[spatial_axis[1]] == pool_size[1], f'unpooled feature shape {outputs.shape} != config'
        assert outputs.shape[feat_axis] == model.num_features, f'unpooled feature dim {outputs.shape[feat_axis]} != model.num_features {model.num_features}'
        assert outputs_pre.shape[1] == model.head_hidden_size, f'pre_logits feature dim {outputs_pre.shape[1]} != model.head_hidden_size {model.head_hidden_size}'

        # test forward after deleting the classifier, output should be pooled, size(-1) == model.num_features
        model.reset_classifier(0)
        model.to(torch_device)
        outputs = model.forward(input_tensor)
        assert len(outputs.shape) == 2
        assert outputs.shape[1] == model.head_hidden_size, f'feature dim w/ removed classifier {outputs.shape[1]} != model.head_hidden_size {model.head_hidden_size}'
        assert outputs.shape == outputs_pre.shape, f'output shape of pre_logits {outputs_pre.shape} does not match reset_head(0) {outputs.shape}'

        # test model forward after removing pooling and classifier
        if not isinstance(model, EARLY_POOL_MODELS):
            model.reset_classifier(0, '')  # reset classifier and disable global pooling
            model.to(torch_device)
            outputs = model.forward(input_tensor)
            assert len(outputs.shape) == 4
            assert outputs.shape[spatial_axis[0]] == pool_size[0] and outputs.shape[spatial_axis[1]] == pool_size[1]

        # test classifier + global pool deletion via __init__
        if 'pruned' not in model_name and not isinstance(model, EARLY_POOL_MODELS):
            model = create_model(model_name, pretrained=False, num_classes=0, global_pool='').eval()
            model.to(torch_device)
            outputs = model.forward(input_tensor)
            assert len(outputs.shape) == 4
            assert outputs.shape[spatial_axis[0]] == pool_size[0] and outputs.shape[spatial_axis[1]] == pool_size[1]

    # check classifier name matches default_cfg
    if cfg.get('num_classes', None):
        classifier = cfg['classifier']
        if not isinstance(classifier, (tuple, list)):
            classifier = classifier,
        for c in classifier:
            assert c + ".weight" in state_dict.keys(), f'{c} not in model params'

    # check first conv(s) names match default_cfg
    first_conv = cfg['first_conv']
    if isinstance(first_conv, str):
        first_conv = (first_conv,)
    assert isinstance(first_conv, (tuple, list))
    for fc in first_conv:
        assert fc + ".weight" in state_dict.keys(), f'{fc} not in model params'

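# --- Illustrative aside, not part of the diff ---
# The test above encodes the split-head contract checked for every model:
# forward(x) is equivalent to forward_head(forward_features(x)), and
# pre_logits=True stops just before the classifier. A minimal sketch
# ('resnet18' is just an example model name):
import timm
import torch

model = timm.create_model('resnet18', pretrained=False).eval()
x = torch.randn(1, 3, 224, 224)
with torch.no_grad():
    feats = model.forward_features(x)                 # unpooled, e.g. [1, 512, 7, 7]
    pre = model.forward_head(feats, pre_logits=True)  # pooled, pre-classifier
    logits = model.forward_head(feats)                # classifier output
print(feats.shape, pre.shape, logits.shape)
# --- end aside ---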
@pytest.mark.cfg
@pytest.mark.timeout(timeout360)
@pytest.mark.parametrize('model_name', list_models(filter=NON_STD_FILTERS, exclude_filters=NON_STD_EXCLUDE_FILTERS, include_tags=True))
@pytest.mark.parametrize('batch_size', [1])
def test_model_default_cfgs_non_std(model_name, batch_size):
    """Run a single forward pass with each model"""
    model = create_model(model_name, pretrained=False)
    model.eval()
    model.to(torch_device)
    assert getattr(model, 'num_classes') >= 0
    assert getattr(model, 'num_features') > 0
    assert getattr(model, 'head_hidden_size') > 0
    state_dict = model.state_dict()
    cfg = model.default_cfg

    input_size = _get_input_size(model=model)
    if max(input_size) > 320:  # FIXME const
        pytest.skip("Fixed input size model > limit.")

    input_tensor = torch.randn((batch_size, *input_size), device=torch_device)
    feat_dim = getattr(model, 'feature_dim', None)

    outputs = model.forward_features(input_tensor)
    outputs_pre = model.forward_head(outputs, pre_logits=True)
    if isinstance(outputs, (tuple, list)):
        # cannot currently verify multi-tensor output.
        pass
    else:
        if feat_dim is None:
            feat_dim = -1 if outputs.ndim == 3 else 1
        assert outputs.shape[feat_dim] == model.num_features
        assert outputs_pre.shape[1] == model.head_hidden_size

    # test forward after deleting the classifier, output should be pooled, size(-1) == model.num_features
    model.reset_classifier(0)
    model.to(torch_device)
    outputs = model.forward(input_tensor)
    if isinstance(outputs, (tuple, list)):
        outputs = outputs[0]
    if feat_dim is None:
        feat_dim = -1 if outputs.ndim == 3 else 1
    assert outputs.shape[feat_dim] == model.head_hidden_size, 'pooled num_features != config'
    assert outputs.shape == outputs_pre.shape

    model = create_model(model_name, pretrained=False, num_classes=0).eval()
    model.to(torch_device)
    outputs = model.forward(input_tensor)
    if isinstance(outputs, (tuple, list)):
        outputs = outputs[0]
    if feat_dim is None:
        feat_dim = -1 if outputs.ndim == 3 else 1
    assert outputs.shape[feat_dim] == model.num_features

    # check classifier name matches default_cfg
    if cfg.get('num_classes', None):
        classifier = cfg['classifier']
        if not isinstance(classifier, (tuple, list)):
            classifier = classifier,
        for c in classifier:
            assert c + ".weight" in state_dict.keys(), f'{c} not in model params'

    # check first conv(s) names match default_cfg
    first_conv = cfg['first_conv']
    if isinstance(first_conv, str):
        first_conv = (first_conv,)
    assert isinstance(first_conv, (tuple, list))
    for fc in first_conv:
        assert fc + ".weight" in state_dict.keys(), f'{fc} not in model params'

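# --- Illustrative aside, not part of the diff ---
# Both cfg tests lean on get_spatial_dim / get_channel_dim to stay agnostic to
# output layout (CNN 'NCHW' vs channels-last 'NHWC'). A sketch of the expected
# mapping, assuming these helpers are exported from timm.layers as the usage
# in this test file suggests; the printed values are expectations, not
# verified output:
from timm.layers import get_channel_dim, get_spatial_dim

print(get_spatial_dim('NCHW'), get_channel_dim('NCHW'))  # expect (2, 3) and 1
print(get_spatial_dim('NHWC'), get_channel_dim('NHWC'))  # expect (1, 2) and 3
# --- end aside ---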
if 'GITHUB_ACTIONS' not in os.environ:
    @pytest.mark.timeout(240)
    @pytest.mark.parametrize('model_name', list_models(pretrained=True))
    @pytest.mark.parametrize('batch_size', [1])
    def test_model_load_pretrained(model_name, batch_size):
        """Check that pretrained weights load, verify support for in_chans != 3 while doing so."""
        in_chans = 3 if 'pruned' in model_name else 1  # pruning not currently supported with in_chans change
        create_model(model_name, pretrained=True, in_chans=in_chans, num_classes=5)
        create_model(model_name, pretrained=True, in_chans=in_chans, num_classes=0)

    @pytest.mark.timeout(240)
    @pytest.mark.parametrize('model_name', list_models(pretrained=True, exclude_filters=NON_STD_FILTERS))
    @pytest.mark.parametrize('batch_size', [1])
    def test_model_features_pretrained(model_name, batch_size):
        """Check that pretrained weights load when features_only==True."""
        create_model(model_name, pretrained=True, features_only=True)

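# --- Illustrative aside, not part of the diff ---
# Loading pretrained weights with in_chans=1 (as exercised above) works
# because timm adapts the first conv: for a single input channel the
# pretrained 3-channel kernels are summed over the input-channel dim.
# A sketch of the idea only; this is not timm's actual helper:
import torch

def adapt_first_conv_to_1ch(weight_3ch: torch.Tensor) -> torch.Tensor:
    # weight_3ch: (out_ch, 3, kh, kw) -> (out_ch, 1, kh, kw)
    return weight_3ch.sum(dim=1, keepdim=True)

w = torch.randn(64, 3, 7, 7)
print(adapt_first_conv_to_1ch(w).shape)  # torch.Size([64, 1, 7, 7])
# --- end aside ---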
@pytest.mark.torchscript
@pytest.mark.timeout(timeout120)
@pytest.mark.parametrize(
    'model_name', list_models(exclude_filters=EXCLUDE_FILTERS + EXCLUDE_JIT_FILTERS, name_matches_cfg=True))
@pytest.mark.parametrize('batch_size', [1])
def test_model_forward_torchscript(model_name, batch_size):
    """Run a single forward pass with each model"""
    input_size = _get_input_size(model_name=model_name, target=TARGET_JIT_SIZE)
    if max(input_size) > MAX_JIT_SIZE:
        pytest.skip("Fixed input size model > limit.")

    with set_scriptable(True):
        model = create_model(model_name, pretrained=False)
    model.eval()

    model = torch.jit.script(model)
    model.to(torch_device)
    outputs = model(torch.randn((batch_size, *input_size)))

    assert outputs.shape[0] == batch_size
    assert not torch.isnan(outputs).any(), 'Output included NaNs'

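# --- Illustrative aside, not part of the diff ---
# The same set_scriptable + torch.jit.script pattern the test uses can export
# a model for deployment. A minimal sketch; 'resnet18' and the output path
# are just examples:
import timm
import torch
from timm import set_scriptable

with set_scriptable(True):  # select jit-safe impls of activations etc.
    model = timm.create_model('resnet18', pretrained=False)
model.eval()
scripted = torch.jit.script(model)
scripted.save('resnet18_scripted.pt')  # hypothetical output path
# --- end aside ---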
EXCLUDE_FEAT_FILTERS = [
    '*pruned*',  # hopefully fix at some point
] + NON_STD_FILTERS
if 'GITHUB_ACTIONS' in os.environ:  # and 'Linux' in platform.system():
    # GitHub Linux runner is slower and hits memory limits sooner than MacOS, exclude bigger models
    EXCLUDE_FEAT_FILTERS += ['*resnext101_32x32d', '*resnext101_32x16d']

@pytest.mark.features
@pytest.mark.timeout(120)
@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS + EXCLUDE_FEAT_FILTERS))
@pytest.mark.parametrize('batch_size', [1])
def test_model_forward_features(model_name, batch_size):
    """Run a single forward pass with each model in feature extraction mode"""
    model = create_model(model_name, pretrained=False, features_only=True)
    model.eval()
    expected_channels = model.feature_info.channels()
    expected_reduction = model.feature_info.reduction()
    assert len(expected_channels) >= 3  # all models here should have at least 3 default feat levels

    input_size = _get_input_size(model=model, target=TARGET_FFEAT_SIZE)
    if max(input_size) > MAX_FFEAT_SIZE:
        pytest.skip("Fixed input size model > limit.")
    output_fmt = getattr(model, 'output_fmt', 'NCHW')
    feat_axis = get_channel_dim(output_fmt)
    spatial_axis = get_spatial_dim(output_fmt)
    import math

    outputs = model(torch.randn((batch_size, *input_size)))
    assert len(expected_channels) == len(outputs)
    spatial_size = input_size[-2:]
    for e, r, o in zip(expected_channels, expected_reduction, outputs):
        assert e == o.shape[feat_axis]
        assert o.shape[spatial_axis[0]] <= math.ceil(spatial_size[0] / r) + 1
        assert o.shape[spatial_axis[1]] <= math.ceil(spatial_size[1] / r) + 1
        assert o.shape[0] == batch_size
        assert not torch.isnan(o).any()

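# --- Illustrative aside, not part of the diff ---
# features_only=True wraps a model as a multi-scale feature extractor whose
# feature_info drives the channel/reduction checks above. A minimal sketch
# ('resnet18' is just an example model name; printed values are typical):
import timm
import torch

net = timm.create_model('resnet18', pretrained=False, features_only=True).eval()
print(net.feature_info.channels())   # e.g. [64, 64, 128, 256, 512]
print(net.feature_info.reduction())  # e.g. [2, 4, 8, 16, 32]
feats = net(torch.randn(1, 3, 224, 224))
for f in feats:
    print(f.shape)  # one 4D map per feature level
# --- end aside ---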
@pytest.mark.features
@pytest.mark.timeout(120)
@pytest.mark.parametrize('model_name', list_models(module=FEAT_INTER_FILTERS, exclude_filters=EXCLUDE_FILTERS + ['*pruned*']))
@pytest.mark.parametrize('batch_size', [1])
def test_model_forward_intermediates_features(model_name, batch_size):
    """Run a single forward pass with each model in feature extraction mode"""
    model = create_model(model_name, pretrained=False, features_only=True, feature_cls='getter')
    model.eval()
    expected_channels = model.feature_info.channels()
    expected_reduction = model.feature_info.reduction()

    input_size = _get_input_size(model=model, target=TARGET_FFEAT_SIZE)
    if max(input_size) > MAX_FFEAT_SIZE:
        pytest.skip("Fixed input size model > limit.")
    output_fmt = getattr(model, 'output_fmt', 'NCHW')
    feat_axis = get_channel_dim(output_fmt)
    spatial_axis = get_spatial_dim(output_fmt)
    import math

    outputs = model(torch.randn((batch_size, *input_size)))
    assert len(expected_channels) == len(outputs)
    spatial_size = input_size[-2:]
    for e, r, o in zip(expected_channels, expected_reduction, outputs):
        print(o.shape)
        assert e == o.shape[feat_axis]
        assert o.shape[spatial_axis[0]] <= math.ceil(spatial_size[0] / r) + 1
        assert o.shape[spatial_axis[1]] <= math.ceil(spatial_size[1] / r) + 1
        assert o.shape[0] == batch_size
        assert not torch.isnan(o).any()

@pytest.mark.features
@pytest.mark.timeout(120)
@pytest.mark.parametrize('model_name', list_models(module=FEAT_INTER_FILTERS, exclude_filters=EXCLUDE_FILTERS + ['*pruned*']))
@pytest.mark.parametrize('batch_size', [1])
def test_model_forward_intermediates(model_name, batch_size):
    """Run a single forward pass with each model in feature extraction mode"""
    model = create_model(model_name, pretrained=False)
    model.eval()
    feature_info = timm.models.FeatureInfo(model.feature_info, len(model.feature_info))
    expected_channels = feature_info.channels()
    expected_reduction = feature_info.reduction()
    assert len(expected_channels) >= 3  # all models here should have at least 3 feature levels

    input_size = _get_input_size(model=model, target=TARGET_FFEAT_SIZE)
    if max(input_size) > MAX_FFEAT_SIZE:
        pytest.skip("Fixed input size model > limit.")
    output_fmt = 'NCHW'  # NOTE output_fmt determined by forward_intermediates() arg, not model attribute
    feat_axis = get_channel_dim(output_fmt)
    spatial_axis = get_spatial_dim(output_fmt)
    import math

    output, intermediates = model.forward_intermediates(
        torch.randn((batch_size, *input_size)),
        output_fmt=output_fmt,
    )
    assert len(expected_channels) == len(intermediates)
    spatial_size = input_size[-2:]
    for e, r, o in zip(expected_channels, expected_reduction, intermediates):
        assert e == o.shape[feat_axis]
        assert o.shape[spatial_axis[0]] <= math.ceil(spatial_size[0] / r) + 1
        assert o.shape[spatial_axis[1]] <= math.ceil(spatial_size[1] / r) + 1
        assert o.shape[0] == batch_size
        assert not torch.isnan(o).any()

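# --- Illustrative aside, not part of the diff ---
# Unlike features_only, forward_intermediates() returns the final features
# plus intermediate maps without rebuilding the model. A minimal sketch
# ('vit_tiny_patch16_224' is just an example model name):
import timm
import torch

model = timm.create_model('vit_tiny_patch16_224', pretrained=False).eval()
with torch.no_grad():
    final, intermediates = model.forward_intermediates(
        torch.randn(1, 3, 224, 224),
        output_fmt='NCHW',  # token outputs reshaped to 2D maps
    )
for t in intermediates:
    print(t.shape)  # e.g. [1, 192, 14, 14] per selected block
# --- end aside ---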
def _create_fx_model(model, train=False):
    # This block of code does a bit of juggling to handle any case where there are multiple outputs in train mode
    # So we trace once and look at the graph, and get the indices of the nodes that lead into the original fx output
    # node. Then we use those indices to select from train_nodes returned by torchvision get_graph_node_names
    tracer_kwargs = dict(
        leaf_modules=get_notrace_modules(),
        autowrap_functions=get_notrace_functions(),
        #enable_cpatching=True,
        param_shapes_constant=True
    )
    train_nodes, eval_nodes = get_graph_node_names(model, tracer_kwargs=tracer_kwargs)

    eval_return_nodes = [eval_nodes[-1]]
    train_return_nodes = [train_nodes[-1]]
    if train:
        tracer = NodePathTracer(**tracer_kwargs)
        graph = tracer.trace(model)
        graph_nodes = list(reversed(graph.nodes))
        output_node_names = [n.name for n in graph_nodes[0]._input_nodes.keys()]
        graph_node_names = [n.name for n in graph_nodes]
        output_node_indices = [-graph_node_names.index(node_name) for node_name in output_node_names]
        train_return_nodes = [train_nodes[ix] for ix in output_node_indices]

    fx_model = create_feature_extractor(
        model,
        train_return_nodes=train_return_nodes,
        eval_return_nodes=eval_return_nodes,
        tracer_kwargs=tracer_kwargs,
    )
    return fx_model

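# --- Illustrative aside, not part of the diff ---
# _create_fx_model above builds on torchvision's FX utilities; the plain,
# timm-agnostic usage looks like this ('layer2'/'layer4' are node names from
# a torchvision resnet18, chosen for illustration):
import torch
import torchvision
from torchvision.models.feature_extraction import (
    create_feature_extractor, get_graph_node_names,
)

net = torchvision.models.resnet18()
train_nodes, eval_nodes = get_graph_node_names(net)  # traceable node names
extractor = create_feature_extractor(net, return_nodes=['layer2', 'layer4'])
out = extractor(torch.randn(1, 3, 224, 224))
print({k: v.shape for k, v in out.items()})
# --- end aside ---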
EXCLUDE_FX_FILTERS = ['vit_gi*', 'hiera*']
# not enough memory to run fx on more models than other tests
if 'GITHUB_ACTIONS' in os.environ:
    EXCLUDE_FX_FILTERS += [
        'beit_large*',
        'mixer_l*',
        '*nfnet_f2*',
        '*resnext101_32x32d',
        'resnetv2_152x2*',
        'resmlp_big*',
        'resnetrs270',
        'swin_large*',
        'vgg*',
        'vit_large*',
        'vit_base_patch8*',
        'xcit_large*',
    ]

@pytest.mark.fxforward
@pytest.mark.timeout(120)
@pytest.mark.parametrize('model_name', list_models(exclude_filters=EXCLUDE_FILTERS + EXCLUDE_FX_FILTERS))
@pytest.mark.parametrize('batch_size', [1])
def test_model_forward_fx(model_name, batch_size):
    """
    Symbolically trace each model and run single forward pass through the resulting GraphModule
    Also check that the output of a forward pass through the GraphModule is the same as that from the original Module
    """
    if not has_fx_feature_extraction:
        pytest.skip("Can't test FX. Torch >= 1.10 and Torchvision >= 0.11 are required.")

    model = create_model(model_name, pretrained=False)
    model.eval()

    input_size = _get_input_size(model=model, target=TARGET_FWD_FX_SIZE)
    if max(input_size) > MAX_FWD_FX_SIZE:
        pytest.skip("Fixed input size model > limit.")
    with torch.no_grad():
        inputs = torch.randn((batch_size, *input_size))
        outputs = model(inputs)
        if isinstance(outputs, tuple):
            outputs = torch.cat(outputs)

        model = _create_fx_model(model)
        fx_outputs = tuple(model(inputs).values())
        if isinstance(fx_outputs, tuple):
            fx_outputs = torch.cat(fx_outputs)

    assert torch.all(fx_outputs == outputs)
    assert outputs.shape[0] == batch_size
    assert not torch.isnan(outputs).any(), 'Output included NaNs'

@pytest.mark.fxbackward
@pytest.mark.timeout(120)
@pytest.mark.parametrize('model_name', list_models(
    exclude_filters=EXCLUDE_FILTERS + EXCLUDE_FX_FILTERS, name_matches_cfg=True))
@pytest.mark.parametrize('batch_size', [2])
def test_model_backward_fx(model_name, batch_size):
    """Symbolically trace each model and run single backward pass through the resulting GraphModule"""
    if not has_fx_feature_extraction:
        pytest.skip("Can't test FX. Torch >= 1.10 and Torchvision >= 0.11 are required.")

    input_size = _get_input_size(model_name=model_name, target=TARGET_BWD_FX_SIZE)
    if max(input_size) > MAX_BWD_FX_SIZE:
        pytest.skip("Fixed input size model > limit.")

    model = create_model(model_name, pretrained=False, num_classes=42)
    model.train()
    num_params = sum([x.numel() for x in model.parameters()])
    if 'GITHUB_ACTIONS' in os.environ and num_params > 100e6:
        pytest.skip("Skipping FX backward test on model with more than 100M params.")

    model = _create_fx_model(model, train=True)
    outputs = tuple(model(torch.randn((batch_size, *input_size))).values())
    if isinstance(outputs, tuple):
        outputs = torch.cat(outputs)
    outputs.mean().backward()
    for n, x in model.named_parameters():
        assert x.grad is not None, f'No gradient for {n}'
    num_grad = sum([x.grad.numel() for x in model.parameters() if x.grad is not None])

    assert outputs.shape[-1] == 42
    assert num_params == num_grad, 'Some parameters are missing gradients'
    assert not torch.isnan(outputs).any(), 'Output included NaNs'

if 'GITHUB_ACTIONS' not in os.environ:
    # FIXME this test is causing GitHub actions to run out of RAM and abruptly kill the test process

    # reason: model is scripted after fx tracing, but beit has torch.jit.is_scripting() control flow
    EXCLUDE_FX_JIT_FILTERS = [
        'deit_*_distilled_patch16_224',
        'levit*',
        'pit_*_distilled_224',
    ] + EXCLUDE_FX_FILTERS

    @pytest.mark.timeout(120)
    @pytest.mark.parametrize(
        'model_name', list_models(
            exclude_filters=EXCLUDE_FILTERS + EXCLUDE_JIT_FILTERS + EXCLUDE_FX_JIT_FILTERS, name_matches_cfg=True))
    @pytest.mark.parametrize('batch_size', [1])
    def test_model_forward_fx_torchscript(model_name, batch_size):
        """Symbolically trace each model, script it, and run single forward pass"""
        if not has_fx_feature_extraction:
            pytest.skip("Can't test FX. Torch >= 1.10 and Torchvision >= 0.11 are required.")

        input_size = _get_input_size(model_name=model_name, target=TARGET_JIT_SIZE)
        if max(input_size) > MAX_JIT_SIZE:
            pytest.skip("Fixed input size model > limit.")

        with set_scriptable(True):
            model = create_model(model_name, pretrained=False)
        model.eval()

        model = torch.jit.script(_create_fx_model(model))
        with torch.no_grad():
            outputs = tuple(model(torch.randn((batch_size, *input_size))).values())
            if isinstance(outputs, tuple):
                outputs = torch.cat(outputs)

        assert outputs.shape[0] == batch_size
        assert not torch.isnan(outputs).any(), 'Output included NaNs'

    @pytest.mark.timeout(120)
    @pytest.mark.parametrize('model_name', ["regnetx_002"])
    @pytest.mark.parametrize('batch_size', [1])
    def test_model_forward_torchscript_with_features_fx(model_name, batch_size):
        """Create a model with feature extraction based on fx, script it, and run
        a single forward pass"""
        if not has_fx_feature_extraction:
            pytest.skip("Can't test FX. Torch >= 1.10 and Torchvision >= 0.11 are required.")

        allowed_models = list_models(
            exclude_filters=EXCLUDE_FILTERS + EXCLUDE_JIT_FILTERS + EXCLUDE_FX_JIT_FILTERS,
            name_matches_cfg=True
        )
        assert model_name in allowed_models, f"{model_name=} not supported for this test"

        input_size = _get_input_size(model_name=model_name, target=TARGET_JIT_SIZE)
        assert max(input_size) <= MAX_JIT_SIZE, "Fixed input size model > limit. Pick a different model to run this test"

        with set_scriptable(True):
            model = create_model(model_name, pretrained=False, features_only=True, feature_cfg={"feature_cls": "fx"})
        model.eval()

        model = torch.jit.script(model)
        with torch.no_grad():
            outputs = model(torch.randn((batch_size, *input_size)))

        assert isinstance(outputs, list)

        for tensor in outputs:
            assert tensor.shape[0] == batch_size
            assert not torch.isnan(tensor).any(), 'Output included NaNs'

pytorch-image-models/timm/__init__.py
ADDED
@@ -0,0 +1,4 @@
from .version import __version__
from .layers import is_scriptable, is_exportable, set_scriptable, set_exportable
from .models import create_model, list_models, list_pretrained, is_model, list_modules, model_entrypoint, \
    is_model_pretrained, get_pretrained_cfg, get_pretrained_cfg_value
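# --- Illustrative aside, not part of the diff ---
# The handful of names re-exported above form timm's primary API. Typical
# use ('resnet18' is just an example model name):
import timm

print(timm.__version__)
print(timm.list_models('resnet18*')[:3])  # wildcard model search
assert timm.is_model('resnet18')
model = timm.create_model('resnet18', pretrained=False, num_classes=10)
print(timm.get_pretrained_cfg_value('resnet18', 'input_size'))  # (3, 224, 224)
# --- end aside ---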
pytorch-image-models/timm/data/__init__.py
ADDED
@@ -0,0 +1,15 @@
from .auto_augment import RandAugment, AutoAugment, rand_augment_ops, auto_augment_policy,\
    rand_augment_transform, auto_augment_transform
from .config import resolve_data_config, resolve_model_data_config
from .constants import *
from .dataset import ImageDataset, IterableImageDataset, AugMixDataset
from .dataset_factory import create_dataset
from .dataset_info import DatasetInfo, CustomDatasetInfo
from .imagenet_info import ImageNetInfo, infer_imagenet_subset
from .loader import create_loader
from .mixup import Mixup, FastCollateMixup
from .readers import create_reader
from .readers import get_img_extensions, is_img_extension, set_img_extensions, add_img_extensions, del_img_extensions
from .real_labels import RealLabelsImagenet
from .transforms import *
from .transforms_factory import create_transform
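# --- Illustrative aside, not part of the diff ---
# These exports chain into the usual eval data pipeline: resolve the model's
# data config, then build a matching transform. A minimal sketch; the model
# name is just an example and the commented dataset path is a placeholder:
import timm
from timm.data import resolve_model_data_config, create_transform

model = timm.create_model('resnet18', pretrained=False)
data_cfg = resolve_model_data_config(model)
transform = create_transform(**data_cfg, is_training=False)
print(data_cfg['input_size'], data_cfg['mean'], data_cfg['std'])
# dataset = timm.data.create_dataset('', root='/path/to/imagenet', split='validation')
# --- end aside ---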
pytorch-image-models/timm/data/__pycache__/auto_augment.cpython-39.pyc
ADDED (binary file, 27.6 kB)
pytorch-image-models/timm/data/__pycache__/config.cpython-39.pyc
ADDED (binary file, 2.72 kB)
pytorch-image-models/timm/data/__pycache__/dataset.cpython-39.pyc
ADDED (binary file, 5.83 kB)
pytorch-image-models/timm/data/__pycache__/dataset_factory.cpython-39.pyc
ADDED (binary file, 6.03 kB)
pytorch-image-models/timm/data/__pycache__/dataset_info.cpython-39.pyc
ADDED (binary file, 3.35 kB)
pytorch-image-models/timm/data/__pycache__/distributed_sampler.cpython-39.pyc
ADDED (binary file, 4.42 kB)
pytorch-image-models/timm/data/__pycache__/imagenet_info.cpython-39.pyc
ADDED (binary file, 3.83 kB)
pytorch-image-models/timm/data/__pycache__/mixup.cpython-39.pyc
ADDED (binary file, 11.2 kB)
pytorch-image-models/timm/data/__pycache__/random_erasing.cpython-39.pyc
ADDED (binary file, 3.94 kB)
pytorch-image-models/timm/data/__pycache__/real_labels.cpython-39.pyc
ADDED (binary file, 2.52 kB)
pytorch-image-models/timm/data/__pycache__/transforms.cpython-39.pyc
ADDED (binary file, 18.3 kB)
pytorch-image-models/timm/data/__pycache__/transforms_factory.cpython-39.pyc
ADDED (binary file, 12.6 kB)
pytorch-image-models/timm/data/_info/imagenet12k_synsets.txt
ADDED (diff too large to render)
pytorch-image-models/timm/data/_info/imagenet21k_goog_synsets.txt
ADDED (diff too large to render)
pytorch-image-models/timm/data/_info/imagenet22k_ms_synsets.txt
ADDED (diff too large to render)
pytorch-image-models/timm/data/_info/imagenet22k_ms_to_12k_indices.txt
ADDED
@@ -0,0 +1,11821 @@
(11,821-line index body not rendered in this view)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
[Rendered diff table, reconstructed: the added contents of a `timm/data/_info` index-mapping text file, one integer class index per line. Only the first and last entries are reproduced here; the intervening ~2,900 added lines in this view follow the same one-index-per-line format, and the diff is cut off mid-entry at line 2923.]

    1 | +1001
    2 | +1003
    3 | +1004
    4 | +1005
    5 | +1006
    6 | +1007
    7 | +1008
    8 | +1009
    9 | +1010
   10 | +1011
   11 | +1013
   12 | +1014
  ... | ...
 2917 | +5335
 2918 | +5336
 2919 | +5338
 2920 | +445
 2921 | +5344
 2922 | +5347
 2923 | + (truncated)
+
5349
|
2924 |
+
5351
|
2925 |
+
5352
|
2926 |
+
5353
|
2927 |
+
5354
|
2928 |
+
5355
|
2929 |
+
446
|
2930 |
+
5357
|
2931 |
+
5359
|
2932 |
+
447
|
2933 |
+
5360
|
2934 |
+
5364
|
2935 |
+
5365
|
2936 |
+
5366
|
2937 |
+
5367
|
2938 |
+
5368
|
2939 |
+
5370
|
2940 |
+
448
|
2941 |
+
5375
|
2942 |
+
5380
|
2943 |
+
5382
|
2944 |
+
5384
|
2945 |
+
5387
|
2946 |
+
5388
|
2947 |
+
5390
|
2948 |
+
5391
|
2949 |
+
5393
|
2950 |
+
5394
|
2951 |
+
5395
|
2952 |
+
5398
|
2953 |
+
5399
|
2954 |
+
5400
|
2955 |
+
5402
|
2956 |
+
5403
|
2957 |
+
5404
|
2958 |
+
5405
|
2959 |
+
5406
|
2960 |
+
5411
|
2961 |
+
5412
|
2962 |
+
5414
|
2963 |
+
5415
|
2964 |
+
5416
|
2965 |
+
5417
|
2966 |
+
5418
|
2967 |
+
5420
|
2968 |
+
5422
|
2969 |
+
5425
|
2970 |
+
5426
|
2971 |
+
5428
|
2972 |
+
5429
|
2973 |
+
5430
|
2974 |
+
449
|
2975 |
+
5433
|
2976 |
+
5434
|
2977 |
+
5435
|
2978 |
+
450
|
2979 |
+
5436
|
2980 |
+
5437
|
2981 |
+
5439
|
2982 |
+
5443
|
2983 |
+
5444
|
2984 |
+
5445
|
2985 |
+
5446
|
2986 |
+
5449
|
2987 |
+
5450
|
2988 |
+
5452
|
2989 |
+
5454
|
2990 |
+
5455
|
2991 |
+
451
|
2992 |
+
5457
|
2993 |
+
5458
|
2994 |
+
5460
|
2995 |
+
5461
|
2996 |
+
5464
|
2997 |
+
5465
|
2998 |
+
5469
|
2999 |
+
5470
|
3000 |
+
5471
|
3001 |
+
5472
|
3002 |
+
5473
|
3003 |
+
5474
|
3004 |
+
452
|
3005 |
+
5475
|
3006 |
+
5476
|
3007 |
+
453
|
3008 |
+
5478
|
3009 |
+
5479
|
3010 |
+
5480
|
3011 |
+
5481
|
3012 |
+
454
|
3013 |
+
5483
|
3014 |
+
5484
|
3015 |
+
5485
|
3016 |
+
5486
|
3017 |
+
5487
|
3018 |
+
5488
|
3019 |
+
5490
|
3020 |
+
5491
|
3021 |
+
5492
|
3022 |
+
5495
|
3023 |
+
5496
|
3024 |
+
5498
|
3025 |
+
5502
|
3026 |
+
5503
|
3027 |
+
5505
|
3028 |
+
5506
|
3029 |
+
455
|
3030 |
+
5509
|
3031 |
+
5511
|
3032 |
+
5512
|
3033 |
+
5514
|
3034 |
+
5516
|
3035 |
+
5517
|
3036 |
+
456
|
3037 |
+
5520
|
3038 |
+
5521
|
3039 |
+
5522
|
3040 |
+
5523
|
3041 |
+
5524
|
3042 |
+
5525
|
3043 |
+
5527
|
3044 |
+
5528
|
3045 |
+
5529
|
3046 |
+
5530
|
3047 |
+
5532
|
3048 |
+
5533
|
3049 |
+
5534
|
3050 |
+
457
|
3051 |
+
5536
|
3052 |
+
5540
|
3053 |
+
5541
|
3054 |
+
5543
|
3055 |
+
5544
|
3056 |
+
5545
|
3057 |
+
5546
|
3058 |
+
5547
|
3059 |
+
5549
|
3060 |
+
5550
|
3061 |
+
5551
|
3062 |
+
5553
|
3063 |
+
5554
|
3064 |
+
5558
|
3065 |
+
5559
|
3066 |
+
5562
|
3067 |
+
5563
|
3068 |
+
5564
|
3069 |
+
5565
|
3070 |
+
5567
|
3071 |
+
5568
|
3072 |
+
5569
|
3073 |
+
458
|
3074 |
+
5570
|
3075 |
+
5572
|
3076 |
+
459
|
3077 |
+
5574
|
3078 |
+
5577
|
3079 |
+
5578
|
3080 |
+
5579
|
3081 |
+
5581
|
3082 |
+
5582
|
3083 |
+
460
|
3084 |
+
461
|
3085 |
+
5585
|
3086 |
+
5586
|
3087 |
+
5589
|
3088 |
+
5593
|
3089 |
+
5595
|
3090 |
+
5598
|
3091 |
+
5599
|
3092 |
+
5600
|
3093 |
+
5601
|
3094 |
+
5602
|
3095 |
+
5603
|
3096 |
+
5606
|
3097 |
+
5609
|
3098 |
+
5610
|
3099 |
+
5611
|
3100 |
+
5614
|
3101 |
+
5616
|
3102 |
+
5617
|
3103 |
+
5619
|
3104 |
+
5621
|
3105 |
+
5622
|
3106 |
+
5624
|
3107 |
+
5626
|
3108 |
+
5628
|
3109 |
+
5629
|
3110 |
+
5630
|
3111 |
+
5631
|
3112 |
+
5632
|
3113 |
+
462
|
3114 |
+
5635
|
3115 |
+
5637
|
3116 |
+
5638
|
3117 |
+
5639
|
3118 |
+
5641
|
3119 |
+
5642
|
3120 |
+
5646
|
3121 |
+
463
|
3122 |
+
5648
|
3123 |
+
464
|
3124 |
+
5650
|
3125 |
+
5652
|
3126 |
+
5654
|
3127 |
+
5655
|
3128 |
+
5656
|
3129 |
+
5657
|
3130 |
+
5658
|
3131 |
+
5659
|
3132 |
+
5660
|
3133 |
+
5661
|
3134 |
+
5662
|
3135 |
+
5664
|
3136 |
+
5665
|
3137 |
+
465
|
3138 |
+
466
|
3139 |
+
5666
|
3140 |
+
5667
|
3141 |
+
5668
|
3142 |
+
5670
|
3143 |
+
5671
|
3144 |
+
5672
|
3145 |
+
5674
|
3146 |
+
5675
|
3147 |
+
5676
|
3148 |
+
5677
|
3149 |
+
5679
|
3150 |
+
5681
|
3151 |
+
5682
|
3152 |
+
5684
|
3153 |
+
5686
|
3154 |
+
5687
|
3155 |
+
5689
|
3156 |
+
5690
|
3157 |
+
5691
|
3158 |
+
5695
|
3159 |
+
5696
|
3160 |
+
5698
|
3161 |
+
5700
|
3162 |
+
5701
|
3163 |
+
5705
|
3164 |
+
5707
|
3165 |
+
5708
|
3166 |
+
5709
|
3167 |
+
5711
|
3168 |
+
5712
|
3169 |
+
5713
|
3170 |
+
5714
|
3171 |
+
467
|
3172 |
+
5716
|
3173 |
+
5717
|
3174 |
+
5718
|
3175 |
+
5719
|
3176 |
+
5721
|
3177 |
+
5723
|
3178 |
+
5725
|
3179 |
+
5727
|
3180 |
+
5728
|
3181 |
+
5730
|
3182 |
+
468
|
3183 |
+
5732
|
3184 |
+
5733
|
3185 |
+
5734
|
3186 |
+
5735
|
3187 |
+
5736
|
3188 |
+
5737
|
3189 |
+
5738
|
3190 |
+
5739
|
3191 |
+
5740
|
3192 |
+
5741
|
3193 |
+
5742
|
3194 |
+
5743
|
3195 |
+
5744
|
3196 |
+
5746
|
3197 |
+
5747
|
3198 |
+
5748
|
3199 |
+
5749
|
3200 |
+
5750
|
3201 |
+
5752
|
3202 |
+
5753
|
3203 |
+
5754
|
3204 |
+
5755
|
3205 |
+
5756
|
3206 |
+
5757
|
3207 |
+
5758
|
3208 |
+
5762
|
3209 |
+
5765
|
3210 |
+
469
|
3211 |
+
5767
|
3212 |
+
5769
|
3213 |
+
5770
|
3214 |
+
5777
|
3215 |
+
5778
|
3216 |
+
5779
|
3217 |
+
5780
|
3218 |
+
5783
|
3219 |
+
5784
|
3220 |
+
5785
|
3221 |
+
5786
|
3222 |
+
5788
|
3223 |
+
5789
|
3224 |
+
5794
|
3225 |
+
5795
|
3226 |
+
5796
|
3227 |
+
5797
|
3228 |
+
5799
|
3229 |
+
5800
|
3230 |
+
5802
|
3231 |
+
5803
|
3232 |
+
470
|
3233 |
+
5806
|
3234 |
+
5807
|
3235 |
+
5811
|
3236 |
+
5813
|
3237 |
+
5817
|
3238 |
+
5819
|
3239 |
+
471
|
3240 |
+
5820
|
3241 |
+
472
|
3242 |
+
473
|
3243 |
+
5821
|
3244 |
+
5822
|
3245 |
+
5823
|
3246 |
+
5824
|
3247 |
+
5825
|
3248 |
+
5826
|
3249 |
+
5827
|
3250 |
+
5828
|
3251 |
+
5832
|
3252 |
+
5833
|
3253 |
+
5837
|
3254 |
+
5838
|
3255 |
+
5839
|
3256 |
+
5840
|
3257 |
+
5841
|
3258 |
+
5842
|
3259 |
+
5843
|
3260 |
+
5845
|
3261 |
+
5846
|
3262 |
+
5847
|
3263 |
+
5848
|
3264 |
+
5849
|
3265 |
+
5852
|
3266 |
+
5854
|
3267 |
+
5855
|
3268 |
+
5856
|
3269 |
+
5857
|
3270 |
+
5858
|
3271 |
+
5859
|
3272 |
+
5860
|
3273 |
+
5861
|
3274 |
+
5862
|
3275 |
+
5864
|
3276 |
+
5865
|
3277 |
+
5866
|
3278 |
+
5867
|
3279 |
+
474
|
3280 |
+
5869
|
3281 |
+
5870
|
3282 |
+
5871
|
3283 |
+
5872
|
3284 |
+
5873
|
3285 |
+
5878
|
3286 |
+
5882
|
3287 |
+
5883
|
3288 |
+
475
|
3289 |
+
476
|
3290 |
+
5886
|
3291 |
+
477
|
3292 |
+
5891
|
3293 |
+
5894
|
3294 |
+
5895
|
3295 |
+
5897
|
3296 |
+
5899
|
3297 |
+
5900
|
3298 |
+
5901
|
3299 |
+
5903
|
3300 |
+
5907
|
3301 |
+
5908
|
3302 |
+
5909
|
3303 |
+
5910
|
3304 |
+
5911
|
3305 |
+
478
|
3306 |
+
5912
|
3307 |
+
5913
|
3308 |
+
5914
|
3309 |
+
5916
|
3310 |
+
5919
|
3311 |
+
5920
|
3312 |
+
5921
|
3313 |
+
5922
|
3314 |
+
479
|
3315 |
+
5923
|
3316 |
+
5926
|
3317 |
+
5927
|
3318 |
+
5930
|
3319 |
+
5931
|
3320 |
+
5932
|
3321 |
+
5933
|
3322 |
+
5937
|
3323 |
+
480
|
3324 |
+
5938
|
3325 |
+
5939
|
3326 |
+
5940
|
3327 |
+
5941
|
3328 |
+
5942
|
3329 |
+
5944
|
3330 |
+
5945
|
3331 |
+
5946
|
3332 |
+
481
|
3333 |
+
5947
|
3334 |
+
482
|
3335 |
+
5948
|
3336 |
+
5949
|
3337 |
+
5951
|
3338 |
+
5952
|
3339 |
+
483
|
3340 |
+
5955
|
3341 |
+
5957
|
3342 |
+
484
|
3343 |
+
5959
|
3344 |
+
5960
|
3345 |
+
5961
|
3346 |
+
5962
|
3347 |
+
5963
|
3348 |
+
5965
|
3349 |
+
5966
|
3350 |
+
5969
|
3351 |
+
5970
|
3352 |
+
5971
|
3353 |
+
5976
|
3354 |
+
5978
|
3355 |
+
5981
|
3356 |
+
5982
|
3357 |
+
5986
|
3358 |
+
5987
|
3359 |
+
485
|
3360 |
+
5988
|
3361 |
+
5989
|
3362 |
+
5990
|
3363 |
+
5991
|
3364 |
+
5992
|
3365 |
+
5995
|
3366 |
+
5996
|
3367 |
+
5997
|
3368 |
+
486
|
3369 |
+
5998
|
3370 |
+
487
|
3371 |
+
5999
|
3372 |
+
6000
|
3373 |
+
6001
|
3374 |
+
6003
|
3375 |
+
6005
|
3376 |
+
6006
|
3377 |
+
6007
|
3378 |
+
6008
|
3379 |
+
6009
|
3380 |
+
6010
|
3381 |
+
6011
|
3382 |
+
6013
|
3383 |
+
6015
|
3384 |
+
6016
|
3385 |
+
488
|
3386 |
+
6017
|
3387 |
+
489
|
3388 |
+
490
|
3389 |
+
491
|
3390 |
+
6019
|
3391 |
+
6022
|
3392 |
+
6023
|
3393 |
+
6025
|
3394 |
+
6027
|
3395 |
+
6028
|
3396 |
+
6029
|
3397 |
+
6030
|
3398 |
+
6032
|
3399 |
+
6037
|
3400 |
+
6038
|
3401 |
+
6039
|
3402 |
+
6040
|
3403 |
+
6043
|
3404 |
+
6046
|
3405 |
+
6048
|
3406 |
+
6049
|
3407 |
+
6050
|
3408 |
+
6052
|
3409 |
+
6055
|
3410 |
+
6056
|
3411 |
+
6057
|
3412 |
+
6058
|
3413 |
+
6059
|
3414 |
+
6060
|
3415 |
+
6061
|
3416 |
+
6062
|
3417 |
+
6063
|
3418 |
+
6065
|
3419 |
+
6069
|
3420 |
+
6071
|
3421 |
+
6072
|
3422 |
+
6073
|
3423 |
+
6074
|
3424 |
+
492
|
3425 |
+
6075
|
3426 |
+
6076
|
3427 |
+
6077
|
3428 |
+
6079
|
3429 |
+
6081
|
3430 |
+
6082
|
3431 |
+
6083
|
3432 |
+
6084
|
3433 |
+
493
|
3434 |
+
6085
|
3435 |
+
494
|
3436 |
+
6088
|
3437 |
+
495
|
3438 |
+
6090
|
3439 |
+
6094
|
3440 |
+
6095
|
3441 |
+
6096
|
3442 |
+
6097
|
3443 |
+
6098
|
3444 |
+
6099
|
3445 |
+
6100
|
3446 |
+
6101
|
3447 |
+
6102
|
3448 |
+
6107
|
3449 |
+
6108
|
3450 |
+
496
|
3451 |
+
6110
|
3452 |
+
6111
|
3453 |
+
6112
|
3454 |
+
6113
|
3455 |
+
6115
|
3456 |
+
497
|
3457 |
+
6116
|
3458 |
+
6117
|
3459 |
+
6119
|
3460 |
+
6121
|
3461 |
+
6122
|
3462 |
+
6123
|
3463 |
+
6124
|
3464 |
+
6125
|
3465 |
+
6126
|
3466 |
+
6127
|
3467 |
+
6128
|
3468 |
+
6129
|
3469 |
+
498
|
3470 |
+
6131
|
3471 |
+
6132
|
3472 |
+
6134
|
3473 |
+
6135
|
3474 |
+
6136
|
3475 |
+
6137
|
3476 |
+
6139
|
3477 |
+
6140
|
3478 |
+
6143
|
3479 |
+
6144
|
3480 |
+
6148
|
3481 |
+
6153
|
3482 |
+
6155
|
3483 |
+
6157
|
3484 |
+
6158
|
3485 |
+
6159
|
3486 |
+
6160
|
3487 |
+
6161
|
3488 |
+
6162
|
3489 |
+
6164
|
3490 |
+
6165
|
3491 |
+
6166
|
3492 |
+
6168
|
3493 |
+
6170
|
3494 |
+
6172
|
3495 |
+
499
|
3496 |
+
6173
|
3497 |
+
6174
|
3498 |
+
500
|
3499 |
+
6176
|
3500 |
+
6181
|
3501 |
+
6182
|
3502 |
+
6184
|
3503 |
+
6187
|
3504 |
+
6189
|
3505 |
+
6190
|
3506 |
+
501
|
3507 |
+
6192
|
3508 |
+
6193
|
3509 |
+
6194
|
3510 |
+
6195
|
3511 |
+
6196
|
3512 |
+
6197
|
3513 |
+
502
|
3514 |
+
6199
|
3515 |
+
6200
|
3516 |
+
6201
|
3517 |
+
6202
|
3518 |
+
6205
|
3519 |
+
6206
|
3520 |
+
6208
|
3521 |
+
6209
|
3522 |
+
6210
|
3523 |
+
6211
|
3524 |
+
6212
|
3525 |
+
6213
|
3526 |
+
6214
|
3527 |
+
6215
|
3528 |
+
6222
|
3529 |
+
6224
|
3530 |
+
6226
|
3531 |
+
6233
|
3532 |
+
6234
|
3533 |
+
6235
|
3534 |
+
6238
|
3535 |
+
6239
|
3536 |
+
6240
|
3537 |
+
6245
|
3538 |
+
6249
|
3539 |
+
6250
|
3540 |
+
6251
|
3541 |
+
6252
|
3542 |
+
6254
|
3543 |
+
6255
|
3544 |
+
6256
|
3545 |
+
503
|
3546 |
+
6257
|
3547 |
+
6260
|
3548 |
+
6261
|
3549 |
+
6262
|
3550 |
+
6263
|
3551 |
+
6264
|
3552 |
+
504
|
3553 |
+
505
|
3554 |
+
6266
|
3555 |
+
6267
|
3556 |
+
6268
|
3557 |
+
6270
|
3558 |
+
6271
|
3559 |
+
6272
|
3560 |
+
506
|
3561 |
+
6275
|
3562 |
+
6277
|
3563 |
+
6278
|
3564 |
+
6279
|
3565 |
+
6280
|
3566 |
+
6281
|
3567 |
+
6282
|
3568 |
+
6283
|
3569 |
+
6284
|
3570 |
+
6285
|
3571 |
+
6286
|
3572 |
+
6290
|
3573 |
+
6291
|
3574 |
+
6295
|
3575 |
+
6296
|
3576 |
+
6297
|
3577 |
+
6298
|
3578 |
+
6300
|
3579 |
+
6301
|
3580 |
+
6302
|
3581 |
+
6303
|
3582 |
+
6304
|
3583 |
+
6305
|
3584 |
+
507
|
3585 |
+
6308
|
3586 |
+
6309
|
3587 |
+
6310
|
3588 |
+
6315
|
3589 |
+
6316
|
3590 |
+
6317
|
3591 |
+
6320
|
3592 |
+
6321
|
3593 |
+
6322
|
3594 |
+
6323
|
3595 |
+
6324
|
3596 |
+
6328
|
3597 |
+
6329
|
3598 |
+
6333
|
3599 |
+
6335
|
3600 |
+
6337
|
3601 |
+
6338
|
3602 |
+
6339
|
3603 |
+
6340
|
3604 |
+
6341
|
3605 |
+
508
|
3606 |
+
6342
|
3607 |
+
6344
|
3608 |
+
6346
|
3609 |
+
6348
|
3610 |
+
6349
|
3611 |
+
6350
|
3612 |
+
6351
|
3613 |
+
6352
|
3614 |
+
6353
|
3615 |
+
6354
|
3616 |
+
6357
|
3617 |
+
6358
|
3618 |
+
6359
|
3619 |
+
509
|
3620 |
+
6362
|
3621 |
+
6363
|
3622 |
+
6364
|
3623 |
+
6365
|
3624 |
+
6369
|
3625 |
+
6371
|
3626 |
+
6374
|
3627 |
+
6375
|
3628 |
+
6377
|
3629 |
+
6378
|
3630 |
+
6381
|
3631 |
+
6382
|
3632 |
+
510
|
3633 |
+
6385
|
3634 |
+
6386
|
3635 |
+
6387
|
3636 |
+
6388
|
3637 |
+
6389
|
3638 |
+
6391
|
3639 |
+
6393
|
3640 |
+
6394
|
3641 |
+
6395
|
3642 |
+
6396
|
3643 |
+
6399
|
3644 |
+
511
|
3645 |
+
6400
|
3646 |
+
6401
|
3647 |
+
6402
|
3648 |
+
6403
|
3649 |
+
6406
|
3650 |
+
6407
|
3651 |
+
6408
|
3652 |
+
6409
|
3653 |
+
6410
|
3654 |
+
6412
|
3655 |
+
6415
|
3656 |
+
6416
|
3657 |
+
6419
|
3658 |
+
6420
|
3659 |
+
6422
|
3660 |
+
6423
|
3661 |
+
6428
|
3662 |
+
6429
|
3663 |
+
6430
|
3664 |
+
6432
|
3665 |
+
6435
|
3666 |
+
512
|
3667 |
+
6437
|
3668 |
+
6438
|
3669 |
+
6439
|
3670 |
+
513
|
3671 |
+
6441
|
3672 |
+
6442
|
3673 |
+
6443
|
3674 |
+
6446
|
3675 |
+
6447
|
3676 |
+
6448
|
3677 |
+
6450
|
3678 |
+
6451
|
3679 |
+
6453
|
3680 |
+
6454
|
3681 |
+
6455
|
3682 |
+
6459
|
3683 |
+
6460
|
3684 |
+
6462
|
3685 |
+
6463
|
3686 |
+
6466
|
3687 |
+
6467
|
3688 |
+
6471
|
3689 |
+
6472
|
3690 |
+
6473
|
3691 |
+
6474
|
3692 |
+
6476
|
3693 |
+
6477
|
3694 |
+
6480
|
3695 |
+
6481
|
3696 |
+
6482
|
3697 |
+
6483
|
3698 |
+
6484
|
3699 |
+
6485
|
3700 |
+
6487
|
3701 |
+
6489
|
3702 |
+
6490
|
3703 |
+
514
|
3704 |
+
515
|
3705 |
+
6492
|
3706 |
+
6493
|
3707 |
+
6494
|
3708 |
+
516
|
3709 |
+
6496
|
3710 |
+
6498
|
3711 |
+
6499
|
3712 |
+
517
|
3713 |
+
6502
|
3714 |
+
6503
|
3715 |
+
518
|
3716 |
+
519
|
3717 |
+
6505
|
3718 |
+
6506
|
3719 |
+
6507
|
3720 |
+
6508
|
3721 |
+
6509
|
3722 |
+
6510
|
3723 |
+
6512
|
3724 |
+
6513
|
3725 |
+
6517
|
3726 |
+
520
|
3727 |
+
6520
|
3728 |
+
6521
|
3729 |
+
6522
|
3730 |
+
6523
|
3731 |
+
6525
|
3732 |
+
6526
|
3733 |
+
6527
|
3734 |
+
6528
|
3735 |
+
521
|
3736 |
+
522
|
3737 |
+
6532
|
3738 |
+
6533
|
3739 |
+
6534
|
3740 |
+
6540
|
3741 |
+
6542
|
3742 |
+
6546
|
3743 |
+
6547
|
3744 |
+
6549
|
3745 |
+
6552
|
3746 |
+
6553
|
3747 |
+
6554
|
3748 |
+
6556
|
3749 |
+
6559
|
3750 |
+
6560
|
3751 |
+
6561
|
3752 |
+
6563
|
3753 |
+
523
|
3754 |
+
6568
|
3755 |
+
6575
|
3756 |
+
6578
|
3757 |
+
6579
|
3758 |
+
524
|
3759 |
+
6584
|
3760 |
+
6585
|
3761 |
+
6586
|
3762 |
+
6589
|
3763 |
+
6590
|
3764 |
+
6591
|
3765 |
+
6593
|
3766 |
+
6595
|
3767 |
+
6597
|
3768 |
+
6599
|
3769 |
+
6600
|
3770 |
+
6603
|
3771 |
+
6604
|
3772 |
+
6609
|
3773 |
+
6614
|
3774 |
+
6615
|
3775 |
+
6619
|
3776 |
+
6621
|
3777 |
+
6622
|
3778 |
+
6623
|
3779 |
+
6627
|
3780 |
+
6628
|
3781 |
+
6629
|
3782 |
+
525
|
3783 |
+
6632
|
3784 |
+
6634
|
3785 |
+
6641
|
3786 |
+
6642
|
3787 |
+
6647
|
3788 |
+
6648
|
3789 |
+
6649
|
3790 |
+
6651
|
3791 |
+
6653
|
3792 |
+
6654
|
3793 |
+
6658
|
3794 |
+
6664
|
3795 |
+
6665
|
3796 |
+
6666
|
3797 |
+
6672
|
3798 |
+
6675
|
3799 |
+
6676
|
3800 |
+
6677
|
3801 |
+
6678
|
3802 |
+
6679
|
3803 |
+
6681
|
3804 |
+
6684
|
3805 |
+
6685
|
3806 |
+
6686
|
3807 |
+
6688
|
3808 |
+
6689
|
3809 |
+
6690
|
3810 |
+
6691
|
3811 |
+
6693
|
3812 |
+
6698
|
3813 |
+
6699
|
3814 |
+
6700
|
3815 |
+
6701
|
3816 |
+
6702
|
3817 |
+
6704
|
3818 |
+
6706
|
3819 |
+
6708
|
3820 |
+
526
|
3821 |
+
6709
|
3822 |
+
527
|
3823 |
+
6710
|
3824 |
+
6711
|
3825 |
+
6713
|
3826 |
+
6714
|
3827 |
+
6715
|
3828 |
+
6720
|
3829 |
+
6723
|
3830 |
+
6724
|
3831 |
+
6725
|
3832 |
+
6727
|
3833 |
+
528
|
3834 |
+
529
|
3835 |
+
6730
|
3836 |
+
6732
|
3837 |
+
6736
|
3838 |
+
6740
|
3839 |
+
6742
|
3840 |
+
6743
|
3841 |
+
6744
|
3842 |
+
6745
|
3843 |
+
6751
|
3844 |
+
6754
|
3845 |
+
6755
|
3846 |
+
530
|
3847 |
+
6757
|
3848 |
+
6758
|
3849 |
+
6759
|
3850 |
+
531
|
3851 |
+
6762
|
3852 |
+
6765
|
3853 |
+
6766
|
3854 |
+
6767
|
3855 |
+
6768
|
3856 |
+
6769
|
3857 |
+
6770
|
3858 |
+
6771
|
3859 |
+
6772
|
3860 |
+
6773
|
3861 |
+
532
|
3862 |
+
6774
|
3863 |
+
6775
|
3864 |
+
6776
|
3865 |
+
6779
|
3866 |
+
6782
|
3867 |
+
6786
|
3868 |
+
6788
|
3869 |
+
6792
|
3870 |
+
6793
|
3871 |
+
6794
|
3872 |
+
6797
|
3873 |
+
6798
|
3874 |
+
6801
|
3875 |
+
6802
|
3876 |
+
6803
|
3877 |
+
6805
|
3878 |
+
533
|
3879 |
+
6806
|
3880 |
+
534
|
3881 |
+
6807
|
3882 |
+
535
|
3883 |
+
6808
|
3884 |
+
6809
|
3885 |
+
6811
|
3886 |
+
6812
|
3887 |
+
6813
|
3888 |
+
6814
|
3889 |
+
6816
|
3890 |
+
6819
|
3891 |
+
6820
|
3892 |
+
6823
|
3893 |
+
6826
|
3894 |
+
6827
|
3895 |
+
6828
|
3896 |
+
6829
|
3897 |
+
6831
|
3898 |
+
6836
|
3899 |
+
6840
|
3900 |
+
6842
|
3901 |
+
536
|
3902 |
+
537
|
3903 |
+
6847
|
3904 |
+
6848
|
3905 |
+
6849
|
3906 |
+
6850
|
3907 |
+
6851
|
3908 |
+
6852
|
3909 |
+
6854
|
3910 |
+
538
|
3911 |
+
6855
|
3912 |
+
6856
|
3913 |
+
6857
|
3914 |
+
6858
|
3915 |
+
6859
|
3916 |
+
6860
|
3917 |
+
6861
|
3918 |
+
6862
|
3919 |
+
6865
|
3920 |
+
539
|
3921 |
+
6867
|
3922 |
+
6868
|
3923 |
+
6871
|
3924 |
+
6872
|
3925 |
+
6874
|
3926 |
+
6877
|
3927 |
+
6878
|
3928 |
+
6880
|
3929 |
+
6881
|
3930 |
+
6882
|
3931 |
+
6883
|
3932 |
+
6885
|
3933 |
+
6886
|
3934 |
+
6890
|
3935 |
+
6891
|
3936 |
+
6893
|
3937 |
+
6895
|
3938 |
+
6897
|
3939 |
+
6899
|
3940 |
+
6900
|
3941 |
+
6901
|
3942 |
+
6902
|
3943 |
+
6904
|
3944 |
+
6907
|
3945 |
+
6908
|
3946 |
+
6909
|
3947 |
+
6910
|
3948 |
+
6911
|
3949 |
+
6912
|
3950 |
+
6913
|
3951 |
+
6914
|
3952 |
+
6915
|
3953 |
+
6916
|
3954 |
+
6918
|
3955 |
+
6919
|
3956 |
+
6920
|
3957 |
+
6922
|
3958 |
+
6924
|
3959 |
+
6925
|
3960 |
+
6927
|
3961 |
+
6928
|
3962 |
+
6929
|
3963 |
+
6930
|
3964 |
+
6931
|
3965 |
+
6934
|
3966 |
+
6935
|
3967 |
+
6937
|
3968 |
+
6938
|
3969 |
+
540
|
3970 |
+
6939
|
3971 |
+
6940
|
3972 |
+
6941
|
3973 |
+
6942
|
3974 |
+
6947
|
3975 |
+
6949
|
3976 |
+
6951
|
3977 |
+
6952
|
3978 |
+
6954
|
3979 |
+
6955
|
3980 |
+
6958
|
3981 |
+
6959
|
3982 |
+
6962
|
3983 |
+
6964
|
3984 |
+
6965
|
3985 |
+
6969
|
3986 |
+
541
|
3987 |
+
6971
|
3988 |
+
6972
|
3989 |
+
6973
|
3990 |
+
542
|
3991 |
+
6975
|
3992 |
+
6978
|
3993 |
+
6979
|
3994 |
+
6980
|
3995 |
+
6984
|
3996 |
+
6988
|
3997 |
+
6990
|
3998 |
+
6991
|
3999 |
+
6992
|
4000 |
+
6993
|
4001 |
+
6994
|
4002 |
+
6997
|
4003 |
+
543
|
4004 |
+
7001
|
4005 |
+
7002
|
4006 |
+
7003
|
4007 |
+
7006
|
4008 |
+
7007
|
4009 |
+
7009
|
4010 |
+
7010
|
4011 |
+
7012
|
4012 |
+
7014
|
4013 |
+
7015
|
4014 |
+
7016
|
4015 |
+
544
|
4016 |
+
7017
|
4017 |
+
7018
|
4018 |
+
7022
|
4019 |
+
7023
|
4020 |
+
7026
|
4021 |
+
7027
|
4022 |
+
7028
|
4023 |
+
7029
|
4024 |
+
7030
|
4025 |
+
7031
|
4026 |
+
7032
|
4027 |
+
7033
|
4028 |
+
7034
|
4029 |
+
7038
|
4030 |
+
7042
|
4031 |
+
7044
|
4032 |
+
7045
|
4033 |
+
7046
|
4034 |
+
7048
|
4035 |
+
7049
|
4036 |
+
7050
|
4037 |
+
7051
|
4038 |
+
7052
|
4039 |
+
7053
|
4040 |
+
7055
|
4041 |
+
7056
|
4042 |
+
7057
|
4043 |
+
7059
|
4044 |
+
7060
|
4045 |
+
545
|
4046 |
+
546
|
4047 |
+
7065
|
4048 |
+
7066
|
4049 |
+
7067
|
4050 |
+
547
|
4051 |
+
7068
|
4052 |
+
7069
|
4053 |
+
7070
|
4054 |
+
7071
|
4055 |
+
7072
|
4056 |
+
7073
|
4057 |
+
7074
|
4058 |
+
7075
|
4059 |
+
7081
|
4060 |
+
7084
|
4061 |
+
7088
|
4062 |
+
7090
|
4063 |
+
7091
|
4064 |
+
7092
|
4065 |
+
7095
|
4066 |
+
7100
|
4067 |
+
7101
|
4068 |
+
7103
|
4069 |
+
7105
|
4070 |
+
7106
|
4071 |
+
7107
|
4072 |
+
7110
|
4073 |
+
7112
|
4074 |
+
7113
|
4075 |
+
7114
|
4076 |
+
7115
|
4077 |
+
7117
|
4078 |
+
7119
|
4079 |
+
7120
|
4080 |
+
7121
|
4081 |
+
7122
|
4082 |
+
7125
|
4083 |
+
7126
|
4084 |
+
7127
|
4085 |
+
7128
|
4086 |
+
7129
|
4087 |
+
548
|
4088 |
+
7130
|
4089 |
+
7132
|
4090 |
+
549
|
4091 |
+
7133
|
4092 |
+
7136
|
4093 |
+
7137
|
4094 |
+
7141
|
4095 |
+
7142
|
4096 |
+
7143
|
4097 |
+
7144
|
4098 |
+
7145
|
4099 |
+
7146
|
4100 |
+
7148
|
4101 |
+
7150
|
4102 |
+
7152
|
4103 |
+
7153
|
4104 |
+
7155
|
4105 |
+
7156
|
4106 |
+
550
|
4107 |
+
7157
|
4108 |
+
7158
|
4109 |
+
7159
|
4110 |
+
7161
|
4111 |
+
7162
|
4112 |
+
7164
|
4113 |
+
7165
|
4114 |
+
7167
|
4115 |
+
7169
|
4116 |
+
7170
|
4117 |
+
7171
|
4118 |
+
7172
|
4119 |
+
7174
|
4120 |
+
7175
|
4121 |
+
7177
|
4122 |
+
7184
|
4123 |
+
7186
|
4124 |
+
7188
|
4125 |
+
7189
|
4126 |
+
7190
|
4127 |
+
7192
|
4128 |
+
7193
|
4129 |
+
7194
|
4130 |
+
7195
|
4131 |
+
7196
|
4132 |
+
7197
|
4133 |
+
7198
|
4134 |
+
7200
|
4135 |
+
551
|
4136 |
+
7201
|
4137 |
+
7202
|
4138 |
+
7205
|
4139 |
+
7206
|
4140 |
+
7211
|
4141 |
+
7212
|
4142 |
+
7213
|
4143 |
+
7214
|
4144 |
+
7215
|
4145 |
+
7216
|
4146 |
+
7218
|
4147 |
+
7220
|
4148 |
+
7221
|
4149 |
+
7222
|
4150 |
+
7223
|
4151 |
+
7224
|
4152 |
+
7226
|
4153 |
+
7227
|
4154 |
+
7229
|
4155 |
+
7231
|
4156 |
+
7232
|
4157 |
+
7233
|
4158 |
+
7234
|
4159 |
+
7235
|
4160 |
+
7236
|
4161 |
+
7239
|
4162 |
+
7242
|
4163 |
+
7243
|
4164 |
+
7245
|
4165 |
+
552
|
4166 |
+
7246
|
4167 |
+
7247
|
4168 |
+
7251
|
4169 |
+
7252
|
4170 |
+
7253
|
4171 |
+
7254
|
4172 |
+
7255
|
4173 |
+
7256
|
4174 |
+
7257
|
4175 |
+
7260
|
4176 |
+
7261
|
4177 |
+
7262
|
4178 |
+
7267
|
4179 |
+
7269
|
4180 |
+
7270
|
4181 |
+
7272
|
4182 |
+
7273
|
4183 |
+
7276
|
4184 |
+
7277
|
4185 |
+
7278
|
4186 |
+
7280
|
4187 |
+
7281
|
4188 |
+
7284
|
4189 |
+
7285
|
4190 |
+
7287
|
4191 |
+
7288
|
4192 |
+
7293
|
4193 |
+
7294
|
4194 |
+
553
|
4195 |
+
7297
|
4196 |
+
7301
|
4197 |
+
7303
|
4198 |
+
7304
|
4199 |
+
7306
|
4200 |
+
7309
|
4201 |
+
7310
|
4202 |
+
7311
|
4203 |
+
7312
|
4204 |
+
7313
|
4205 |
+
7314
|
4206 |
+
7316
|
4207 |
+
7321
|
4208 |
+
7322
|
4209 |
+
7323
|
4210 |
+
7324
|
4211 |
+
7325
|
4212 |
+
554
|
4213 |
+
7327
|
4214 |
+
555
|
4215 |
+
7330
|
4216 |
+
7331
|
4217 |
+
7333
|
4218 |
+
556
|
4219 |
+
7335
|
4220 |
+
7337
|
4221 |
+
7341
|
4222 |
+
7343
|
4223 |
+
7344
|
4224 |
+
7345
|
4225 |
+
7348
|
4226 |
+
7351
|
4227 |
+
7352
|
4228 |
+
7355
|
4229 |
+
7358
|
4230 |
+
7359
|
4231 |
+
7360
|
4232 |
+
7362
|
4233 |
+
557
|
4234 |
+
7367
|
4235 |
+
7368
|
4236 |
+
7370
|
4237 |
+
7371
|
4238 |
+
7372
|
4239 |
+
7373
|
4240 |
+
7374
|
4241 |
+
7376
|
4242 |
+
7377
|
4243 |
+
7378
|
4244 |
+
7379
|
4245 |
+
7381
|
4246 |
+
7382
|
4247 |
+
7383
|
4248 |
+
7386
|
4249 |
+
7387
|
4250 |
+
7388
|
4251 |
+
7392
|
4252 |
+
7393
|
4253 |
+
7394
|
4254 |
+
7395
|
4255 |
+
7396
|
4256 |
+
7398
|
4257 |
+
7400
|
4258 |
+
7401
|
4259 |
+
7402
|
4260 |
+
7403
|
4261 |
+
7406
|
4262 |
+
7407
|
4263 |
+
7408
|
4264 |
+
7410
|
4265 |
+
7411
|
4266 |
+
7415
|
4267 |
+
7416
|
4268 |
+
7420
|
4269 |
+
7422
|
4270 |
+
558
|
4271 |
+
7423
|
4272 |
+
7426
|
4273 |
+
7427
|
4274 |
+
7428
|
4275 |
+
7431
|
4276 |
+
7432
|
4277 |
+
7434
|
4278 |
+
7435
|
4279 |
+
7436
|
4280 |
+
7437
|
4281 |
+
7438
|
4282 |
+
7439
|
4283 |
+
7440
|
4284 |
+
559
|
4285 |
+
7442
|
4286 |
+
7443
|
4287 |
+
7444
|
4288 |
+
560
|
4289 |
+
7449
|
4290 |
+
7450
|
4291 |
+
7452
|
4292 |
+
7456
|
4293 |
+
7457
|
4294 |
+
7458
|
4295 |
+
7464
|
4296 |
+
7465
|
4297 |
+
7468
|
4298 |
+
7470
|
4299 |
+
561
|
4300 |
+
7477
|
4301 |
+
7479
|
4302 |
+
7480
|
4303 |
+
7482
|
4304 |
+
7483
|
4305 |
+
7484
|
4306 |
+
7486
|
4307 |
+
562
|
4308 |
+
563
|
4309 |
+
7487
|
4310 |
+
564
|
4311 |
+
7490
|
4312 |
+
7491
|
4313 |
+
7492
|
4314 |
+
7494
|
4315 |
+
7497
|
4316 |
+
7498
|
4317 |
+
7499
|
4318 |
+
7501
|
4319 |
+
7503
|
4320 |
+
7504
|
4321 |
+
7507
|
4322 |
+
565
|
4323 |
+
7509
|
4324 |
+
7510
|
4325 |
+
7511
|
4326 |
+
566
|
4327 |
+
7512
|
4328 |
+
7514
|
4329 |
+
7515
|
4330 |
+
7516
|
4331 |
+
7517
|
4332 |
+
7518
|
4333 |
+
7520
|
4334 |
+
7521
|
4335 |
+
7523
|
4336 |
+
7524
|
4337 |
+
7525
|
4338 |
+
7526
|
4339 |
+
7528
|
4340 |
+
7529
|
4341 |
+
7530
|
4342 |
+
567
|
4343 |
+
7531
|
4344 |
+
7532
|
4345 |
+
7533
|
4346 |
+
7535
|
4347 |
+
7536
|
4348 |
+
7539
|
4349 |
+
7540
|
4350 |
+
7542
|
4351 |
+
568
|
4352 |
+
7543
|
4353 |
+
7544
|
4354 |
+
7546
|
4355 |
+
7547
|
4356 |
+
7548
|
4357 |
+
7549
|
4358 |
+
7551
|
4359 |
+
7552
|
4360 |
+
7553
|
4361 |
+
7554
|
4362 |
+
7557
|
4363 |
+
7558
|
4364 |
+
7559
|
4365 |
+
7560
|
4366 |
+
7565
|
4367 |
+
7566
|
4368 |
+
7567
|
4369 |
+
7568
|
4370 |
+
7569
|
4371 |
+
7573
|
4372 |
+
7581
|
4373 |
+
7583
|
4374 |
+
7584
|
4375 |
+
7585
|
4376 |
+
7586
|
4377 |
+
7587
|
4378 |
+
7589
|
4379 |
+
7591
|
4380 |
+
7592
|
4381 |
+
7593
|
4382 |
+
7594
|
4383 |
+
7595
|
4384 |
+
569
|
4385 |
+
7596
|
4386 |
+
7597
|
4387 |
+
7598
|
4388 |
+
7599
|
4389 |
+
7600
|
4390 |
+
7601
|
4391 |
+
7602
|
4392 |
+
7603
|
4393 |
+
7605
|
4394 |
+
7606
|
4395 |
+
7607
|
4396 |
+
7608
|
4397 |
+
7610
|
4398 |
+
7611
|
4399 |
+
7612
|
4400 |
+
7613
|
4401 |
+
7616
|
4402 |
+
7619
|
4403 |
+
7622
|
4404 |
+
7623
|
4405 |
+
7624
|
4406 |
+
7625
|
4407 |
+
7626
|
4408 |
+
570
|
4409 |
+
7628
|
4410 |
+
7629
|
4411 |
+
7630
|
4412 |
+
7631
|
4413 |
+
7632
|
4414 |
+
571
|
4415 |
+
7633
|
4416 |
+
7634
|
4417 |
+
7635
|
4418 |
+
7636
|
4419 |
+
7640
|
4420 |
+
7641
|
4421 |
+
7642
|
4422 |
+
7643
|
4423 |
+
7644
|
4424 |
+
7645
|
4425 |
+
7646
|
4426 |
+
7647
|
4427 |
+
7648
|
4428 |
+
7650
|
4429 |
+
7651
|
4430 |
+
7652
|
4431 |
+
7653
|
4432 |
+
7654
|
4433 |
+
7656
|
4434 |
+
7658
|
4435 |
+
7659
|
4436 |
+
7660
|
4437 |
+
7665
|
4438 |
+
7666
|
4439 |
+
7667
|
4440 |
+
7669
|
4441 |
+
7670
|
4442 |
+
7671
|
4443 |
+
7672
|
4444 |
+
7673
|
4445 |
+
7674
|
4446 |
+
7675
|
4447 |
+
7678
|
4448 |
+
7682
|
4449 |
+
7684
|
4450 |
+
7685
|
4451 |
+
7686
|
4452 |
+
7687
|
4453 |
+
7688
|
4454 |
+
7689
|
4455 |
+
7691
|
4456 |
+
7692
|
4457 |
+
7694
|
4458 |
+
7695
|
4459 |
+
7696
|
4460 |
+
7698
|
4461 |
+
7699
|
4462 |
+
7701
|
4463 |
+
7704
|
4464 |
+
7705
|
4465 |
+
7706
|
4466 |
+
7707
|
4467 |
+
572
|
4468 |
+
7709
|
4469 |
+
573
|
4470 |
+
7710
|
4471 |
+
7711
|
4472 |
+
574
|
4473 |
+
575
|
4474 |
+
7712
|
4475 |
+
7713
|
4476 |
+
7714
|
4477 |
+
7715
|
4478 |
+
7716
|
4479 |
+
576
|
4480 |
+
577
|
4481 |
+
7719
|
4482 |
+
7721
|
4483 |
+
7723
|
4484 |
+
7724
|
4485 |
+
578
|
4486 |
+
7727
|
4487 |
+
7728
|
4488 |
+
7730
|
4489 |
+
7731
|
4490 |
+
7734
|
4491 |
+
7735
|
4492 |
+
7736
|
4493 |
+
7737
|
4494 |
+
7738
|
4495 |
+
579
|
4496 |
+
7739
|
4497 |
+
7741
|
4498 |
+
7742
|
4499 |
+
7743
|
4500 |
+
7744
|
4501 |
+
7745
|
4502 |
+
7746
|
4503 |
+
7747
|
4504 |
+
7749
|
4505 |
+
7752
|
4506 |
+
7753
|
4507 |
+
7754
|
4508 |
+
7755
|
4509 |
+
7756
|
4510 |
+
7757
|
4511 |
+
7758
|
4512 |
+
7760
|
4513 |
+
580
|
4514 |
+
7761
|
4515 |
+
7763
|
4516 |
+
581
|
4517 |
+
7766
|
4518 |
+
7767
|
4519 |
+
7768
|
4520 |
+
7771
|
4521 |
+
582
|
4522 |
+
7774
|
4523 |
+
7775
|
4524 |
+
7780
|
4525 |
+
7781
|
4526 |
+
7782
|
4527 |
+
7787
|
4528 |
+
7789
|
4529 |
+
7790
|
4530 |
+
7792
|
4531 |
+
7795
|
4532 |
+
7796
|
4533 |
+
7797
|
4534 |
+
583
|
4535 |
+
7801
|
4536 |
+
7802
|
4537 |
+
7804
|
4538 |
+
7806
|
4539 |
+
7807
|
4540 |
+
7809
|
4541 |
+
7810
|
4542 |
+
7812
|
4543 |
+
7816
|
4544 |
+
7819
|
4545 |
+
7822
|
4546 |
+
7828
|
4547 |
+
7834
|
4548 |
+
7835
|
4549 |
+
7836
|
4550 |
+
7838
|
4551 |
+
7840
|
4552 |
+
7841
|
4553 |
+
584
|
4554 |
+
585
|
4555 |
+
7846
|
4556 |
+
586
|
4557 |
+
7849
|
4558 |
+
7850
|
4559 |
+
7851
|
4560 |
+
7853
|
4561 |
+
7854
|
4562 |
+
7855
|
4563 |
+
7856
|
4564 |
+
587
|
4565 |
+
7861
|
4566 |
+
588
|
4567 |
+
7862
|
4568 |
+
7863
|
4569 |
+
7865
|
4570 |
+
589
|
4571 |
+
7868
|
4572 |
+
7869
|
4573 |
+
7870
|
4574 |
+
7871
|
4575 |
+
7872
|
4576 |
+
7873
|
4577 |
+
7874
|
4578 |
+
7875
|
4579 |
+
7876
|
4580 |
+
590
|
4581 |
+
591
|
4582 |
+
7878
|
4583 |
+
7879
|
4584 |
+
7880
|
4585 |
+
7881
|
4586 |
+
7882
|
4587 |
+
7883
|
4588 |
+
7884
|
4589 |
+
7886
|
4590 |
+
7887
|
4591 |
+
7890
|
4592 |
+
7892
|
4593 |
+
7893
|
4594 |
+
7894
|
4595 |
+
7899
|
4596 |
+
7900
|
4597 |
+
7902
|
4598 |
+
7904
|
4599 |
+
592
|
4600 |
+
7905
|
4601 |
+
593
|
4602 |
+
7909
|
4603 |
+
7910
|
4604 |
+
7911
|
4605 |
+
594
|
4606 |
+
7912
|
4607 |
+
7916
|
4608 |
+
7918
|
4609 |
+
595
|
4610 |
+
7920
|
4611 |
+
7921
|
4612 |
+
7922
|
4613 |
+
7924
|
4614 |
+
7925
|
4615 |
+
596
|
4616 |
+
7927
|
4617 |
+
7928
|
4618 |
+
7929
|
4619 |
+
7931
|
4620 |
+
7933
|
4621 |
+
7936
|
4622 |
+
7939
|
4623 |
+
7942
|
4624 |
+
7943
|
4625 |
+
7944
|
4626 |
+
7945
|
4627 |
+
7949
|
4628 |
+
7951
|
4629 |
+
7954
|
4630 |
+
7955
|
4631 |
+
7957
|
4632 |
+
7959
|
4633 |
+
7960
|
4634 |
+
7961
|
4635 |
+
7962
|
4636 |
+
7964
|
4637 |
+
7965
|
4638 |
+
7966
|
4639 |
+
7967
|
4640 |
+
7968
|
4641 |
+
7969
|
4642 |
+
7970
|
4643 |
+
7971
|
4644 |
+
7973
|
4645 |
+
7974
|
4646 |
+
7976
|
4647 |
+
7977
|
4648 |
+
7978
|
4649 |
+
7981
|
4650 |
+
7987
|
4651 |
+
7988
|
4652 |
+
7990
|
4653 |
+
7993
|
4654 |
+
7994
|
4655 |
+
7995
|
4656 |
+
7998
|
4657 |
+
7999
|
4658 |
+
8000
|
4659 |
+
8004
|
4660 |
+
8006
|
4661 |
+
8007
|
4662 |
+
8009
|
4663 |
+
8010
|
4664 |
+
8011
|
4665 |
+
8013
|
4666 |
+
8015
|
4667 |
+
8016
|
4668 |
+
8019
|
4669 |
+
8020
|
4670 |
+
8022
|
4671 |
+
8025
|
4672 |
+
8026
|
4673 |
+
8028
|
4674 |
+
8029
|
4675 |
+
8031
|
4676 |
+
8032
|
4677 |
+
8033
|
4678 |
+
8034
|
4679 |
+
8037
|
4680 |
+
8040
|
4681 |
+
8041
|
4682 |
+
8044
|
4683 |
+
8045
|
4684 |
+
8047
|
4685 |
+
8049
|
4686 |
+
8053
|
4687 |
+
597
|
4688 |
+
8054
|
4689 |
+
8057
|
4690 |
+
8058
|
4691 |
+
8059
|
4692 |
+
8060
|
4693 |
+
8061
|
4694 |
+
8062
|
4695 |
+
598
|
4696 |
+
8064
|
4697 |
+
599
|
4698 |
+
8065
|
4699 |
+
8066
|
4700 |
+
8071
|
4701 |
+
600
|
4702 |
+
8072
|
4703 |
+
8073
|
4704 |
+
601
|
4705 |
+
8079
|
4706 |
+
8080
|
4707 |
+
602
|
4708 |
+
8083
|
4709 |
+
8085
|
4710 |
+
8087
|
4711 |
+
8088
|
4712 |
+
8090
|
4713 |
+
8091
|
4714 |
+
603
|
4715 |
+
8094
|
4716 |
+
8099
|
4717 |
+
8100
|
4718 |
+
8101
|
4719 |
+
8103
|
4720 |
+
8104
|
4721 |
+
8106
|
4722 |
+
8107
|
4723 |
+
8108
|
4724 |
+
8109
|
4725 |
+
8111
|
4726 |
+
8112
|
4727 |
+
8113
|
4728 |
+
8114
|
4729 |
+
8115
|
4730 |
+
8117
|
4731 |
+
8118
|
4732 |
+
8119
|
4733 |
+
8120
|
4734 |
+
8121
|
4735 |
+
8123
|
4736 |
+
8124
|
4737 |
+
8125
|
4738 |
+
604
|
4739 |
+
8126
|
4740 |
+
8127
|
4741 |
+
8128
|
4742 |
+
8129
|
4743 |
+
8131
|
4744 |
+
8133
|
4745 |
+
8134
|
4746 |
+
8135
|
4747 |
+
8136
|
4748 |
+
8137
|
4749 |
+
8141
|
4750 |
+
8144
|
4751 |
+
8146
|
4752 |
+
8148
|
4753 |
+
8150
|
4754 |
+
8151
|
4755 |
+
8152
|
4756 |
+
8153
|
4757 |
+
8154
|
4758 |
+
8155
|
4759 |
+
8158
|
4760 |
+
8159
|
4761 |
+
8163
|
4762 |
+
8164
|
4763 |
+
8165
|
4764 |
+
8168
|
4765 |
+
8170
|
4766 |
+
8172
|
4767 |
+
8173
|
4768 |
+
8174
|
4769 |
+
8178
|
4770 |
+
8180
|
4771 |
+
8181
|
4772 |
+
8182
|
4773 |
+
8183
|
4774 |
+
8184
|
4775 |
+
8187
|
4776 |
+
8188
|
4777 |
+
8189
|
4778 |
+
8190
|
4779 |
+
8193
|
4780 |
+
8194
|
4781 |
+
8196
|
4782 |
+
8200
|
4783 |
+
8201
|
4784 |
+
8205
|
4785 |
+
8208
|
4786 |
+
8210
|
4787 |
+
8213
|
4788 |
+
8215
|
4789 |
+
8219
|
4790 |
+
8223
|
4791 |
+
8225
|
4792 |
+
8227
|
4793 |
+
8229
|
4794 |
+
8230
|
4795 |
+
8232
|
4796 |
+
8236
|
4797 |
+
8238
|
4798 |
+
8239
|
4799 |
+
8244
|
4800 |
+
8247
|
4801 |
+
8249
|
4802 |
+
8250
|
4803 |
+
8251
|
4804 |
+
8253
|
4805 |
+
8255
|
4806 |
+
8257
|
4807 |
+
8259
|
4808 |
+
8262
|
4809 |
+
8263
|
4810 |
+
605
|
4811 |
+
8266
|
4812 |
+
606
|
4813 |
+
8267
|
4814 |
+
8269
|
4815 |
+
8271
|
4816 |
+
8274
|
4817 |
+
8275
|
4818 |
+
8277
|
4819 |
+
8279
|
4820 |
+
8280
|
4821 |
+
8282
|
4822 |
+
8284
|
4823 |
+
8285
|
4824 |
+
8287
|
4825 |
+
607
|
4826 |
+
8288
|
4827 |
+
8294
|
4828 |
+
8295
|
4829 |
+
8296
|
4830 |
+
8297
|
4831 |
+
8298
|
4832 |
+
8300
|
4833 |
+
8303
|
4834 |
+
8305
|
4835 |
+
608
|
4836 |
+
609
|
4837 |
+
8307
|
4838 |
+
8308
|
4839 |
+
8309
|
4840 |
+
610
|
4841 |
+
8310
|
4842 |
+
8312
|
4843 |
+
8313
|
4844 |
+
8316
|
4845 |
+
8317
|
4846 |
+
8318
|
4847 |
+
8320
|
4848 |
+
611
|
4849 |
+
612
|
4850 |
+
8324
|
4851 |
+
8325
|
4852 |
+
8326
|
4853 |
+
8327
|
4854 |
+
8328
|
4855 |
+
8329
|
4856 |
+
8330
|
4857 |
+
8331
|
4858 |
+
613
|
4859 |
+
8336
|
4860 |
+
8337
|
4861 |
+
8338
|
4862 |
+
8339
|
4863 |
+
8340
|
4864 |
+
8341
|
4865 |
+
8345
|
4866 |
+
8348
|
4867 |
+
8349
|
4868 |
+
8350
|
4869 |
+
8352
|
4870 |
+
8355
|
4871 |
+
8358
|
4872 |
+
8359
|
4873 |
+
8362
|
4874 |
+
8363
|
4875 |
+
8367
|
4876 |
+
8368
|
4877 |
+
8369
|
4878 |
+
8370
|
4879 |
+
8373
|
4880 |
+
8374
|
4881 |
+
8375
|
4882 |
+
8376
|
4883 |
+
8377
|
4884 |
+
8378
|
4885 |
+
8380
|
4886 |
+
8381
|
4887 |
+
8383
|
4888 |
+
8384
|
4889 |
+
8385
|
4890 |
+
8386
|
4891 |
+
8387
|
4892 |
+
8390
|
4893 |
+
8391
|
4894 |
+
8392
|
4895 |
+
8393
|
4896 |
+
8394
|
4897 |
+
614
|
4898 |
+
8397
|
4899 |
+
8402
|
4900 |
+
8404
|
4901 |
+
8405
|
4902 |
+
8406
|
4903 |
+
8407
|
4904 |
+
8408
|
4905 |
+
8409
|
4906 |
+
8410
|
4907 |
+
8412
|
4908 |
+
8413
|
4909 |
+
8414
|
4910 |
+
8416
|
4911 |
+
8418
|
4912 |
+
8419
|
4913 |
+
8420
|
4914 |
+
615
|
4915 |
+
8421
|
4916 |
+
8422
|
4917 |
+
8423
|
4918 |
+
8424
|
4919 |
+
8426
|
4920 |
+
8427
|
4921 |
+
8428
|
4922 |
+
8429
|
4923 |
+
8430
|
4924 |
+
8434
|
4925 |
+
616
|
4926 |
+
8435
|
4927 |
+
8437
|
4928 |
+
8439
|
4929 |
+
8443
|
4930 |
+
8444
|
4931 |
+
8445
|
4932 |
+
8447
|
4933 |
+
617
|
4934 |
+
8448
|
4935 |
+
8450
|
4936 |
+
8452
|
4937 |
+
8453
|
4938 |
+
8454
|
4939 |
+
618
|
4940 |
+
8458
|
4941 |
+
8459
|
4942 |
+
8461
|
4943 |
+
8462
|
4944 |
+
8463
|
4945 |
+
8465
|
4946 |
+
8467
|
4947 |
+
8468
|
4948 |
+
8470
|
4949 |
+
619
|
4950 |
+
8473
|
4951 |
+
8478
|
4952 |
+
8479
|
4953 |
+
8481
|
4954 |
+
8484
|
4955 |
+
8485
|
4956 |
+
8486
|
4957 |
+
8490
|
4958 |
+
620
|
4959 |
+
8492
|
4960 |
+
8493
|
4961 |
+
8494
|
4962 |
+
8495
|
4963 |
+
8498
|
4964 |
+
8499
|
4965 |
+
8500
|
4966 |
+
8503
|
4967 |
+
8504
|
4968 |
+
8505
|
4969 |
+
8506
|
4970 |
+
8508
|
4971 |
+
8510
|
4972 |
+
8512
|
4973 |
+
8515
|
4974 |
+
8517
|
4975 |
+
8518
|
4976 |
+
621
|
4977 |
+
8519
|
4978 |
+
8520
|
4979 |
+
8522
|
4980 |
+
8523
|
4981 |
+
8525
|
4982 |
+
8527
|
4983 |
+
8528
|
4984 |
+
8529
|
4985 |
+
8531
|
4986 |
+
8532
|
4987 |
+
8533
|
4988 |
+
8534
|
4989 |
+
8535
|
4990 |
+
8537
|
4991 |
+
8539
|
4992 |
+
8540
|
4993 |
+
8541
|
4994 |
+
622
|
4995 |
+
8543
|
4996 |
+
623
|
4997 |
+
8546
|
4998 |
+
8547
|
4999 |
+
8548
|
5000 |
+
8549
|
5001 |
+
8550
|
5002 |
+
8551
|
5003 |
+
8553
|
5004 |
+
624
|
5005 |
+
8554
|
5006 |
+
625
|
5007 |
+
8557
|
5008 |
+
8558
|
5009 |
+
8560
|
5010 |
+
8565
|
5011 |
+
8567
|
5012 |
+
8569
|
5013 |
+
8571
|
5014 |
+
626
|
5015 |
+
8572
|
5016 |
+
8574
|
5017 |
+
8575
|
5018 |
+
8576
|
5019 |
+
8577
|
5020 |
+
8578
|
5021 |
+
8579
|
5022 |
+
8580
|
5023 |
+
8581
|
5024 |
+
8583
|
5025 |
+
8584
|
5026 |
+
627
|
5027 |
+
8585
|
5028 |
+
8587
|
5029 |
+
628
|
5030 |
+
8589
|
5031 |
+
8591
|
5032 |
+
8592
|
5033 |
+
8593
|
5034 |
+
8596
|
5035 |
+
8600
|
5036 |
+
629
|
5037 |
+
8601
|
5038 |
+
8602
|
5039 |
+
8603
|
5040 |
+
8606
|
5041 |
+
8607
|
5042 |
+
8609
|
5043 |
+
8610
|
5044 |
+
630
|
5045 |
+
8612
|
5046 |
+
8613
|
5047 |
+
8614
|
5048 |
+
8615
|
5049 |
+
8619
|
5050 |
+
8620
|
5051 |
+
8621
|
5052 |
+
8623
|
5053 |
+
8624
|
5054 |
+
8625
|
5055 |
+
8626
|
5056 |
+
8627
|
5057 |
+
8628
|
5058 |
+
8630
|
5059 |
+
8632
|
5060 |
+
8633
|
5061 |
+
8636
|
5062 |
+
8637
|
5063 |
+
8640
|
5064 |
+
8641
|
5065 |
+
8642
|
5066 |
+
8644
|
5067 |
+
8645
|
5068 |
+
8647
|
5069 |
+
8650
|
5070 |
+
8651
|
5071 |
+
8654
|
5072 |
+
8655
|
5073 |
+
631
|
5074 |
+
632
|
5075 |
+
8657
|
5076 |
+
8661
|
5077 |
+
633
|
5078 |
+
8663
|
5079 |
+
8664
|
5080 |
+
8665
|
5081 |
+
8666
|
5082 |
+
8669
|
5083 |
+
8673
|
5084 |
+
8675
|
5085 |
+
8676
|
5086 |
+
8678
|
5087 |
+
8679
|
5088 |
+
8681
|
5089 |
+
634
|
5090 |
+
8685
|
5091 |
+
8687
|
5092 |
+
8690
|
5093 |
+
8691
|
5094 |
+
8692
|
5095 |
+
8693
|
5096 |
+
8694
|
5097 |
+
8695
|
5098 |
+
8697
|
5099 |
+
8698
|
5100 |
+
8700
|
5101 |
+
8702
|
5102 |
+
8710
|
5103 |
+
8711
|
5104 |
+
8712
|
5105 |
+
8713
|
5106 |
+
635
|
5107 |
+
8718
|
5108 |
+
8724
|
5109 |
+
8726
|
5110 |
+
8728
|
5111 |
+
8729
|
5112 |
+
636
|
5113 |
+
637
|
5114 |
+
638
|
5115 |
+
639
|
5116 |
+
8738
|
5117 |
+
8740
|
5118 |
+
8741
|
5119 |
+
8742
|
5120 |
+
8746
|
5121 |
+
8748
|
5122 |
+
8749
|
5123 |
+
8750
|
5124 |
+
8752
|
5125 |
+
8753
|
5126 |
+
8754
|
5127 |
+
8756
|
5128 |
+
8757
|
5129 |
+
8758
|
5130 |
+
8759
|
5131 |
+
8760
|
5132 |
+
640
|
5133 |
+
8761
|
5134 |
+
8762
|
5135 |
+
8763
|
5136 |
+
8764
|
5137 |
+
8766
|
5138 |
+
8767
|
5139 |
+
8768
|
5140 |
+
8769
|
5141 |
+
8771
|
5142 |
+
8773
|
5143 |
+
641
|
5144 |
+
8775
|
5145 |
+
8776
|
5146 |
+
642
|
5147 |
+
8777
|
5148 |
+
8778
|
5149 |
+
8779
|
5150 |
+
8783
|
5151 |
+
8785
|
5152 |
+
8787
|
5153 |
+
8789
|
5154 |
+
8790
|
5155 |
+
8791
|
5156 |
+
8792
|
5157 |
+
643
|
5158 |
+
8793
|
5159 |
+
8795
|
5160 |
+
8796
|
5161 |
+
8802
|
5162 |
+
8803
|
5163 |
+
8804
|
5164 |
+
8805
|
5165 |
+
8806
|
5166 |
+
8807
|
5167 |
+
8808
|
5168 |
+
8809
|
5169 |
+
8810
|
5170 |
+
8812
|
5171 |
+
644
|
5172 |
+
8817
|
5173 |
+
8818
|
5174 |
+
8819
|
5175 |
+
8822
|
5176 |
+
8823
|
5177 |
+
8824
|
5178 |
+
8825
|
5179 |
+
8827
|
5180 |
+
8828
|
5181 |
+
8829
|
5182 |
+
645
|
5183 |
+
646
|
5184 |
+
8834
|
5185 |
+
647
|
5186 |
+
8835
|
5187 |
+
8836
|
5188 |
+
8837
|
5189 |
+
8838
|
5190 |
+
8842
|
5191 |
+
8843
|
5192 |
+
8844
|
5193 |
+
8845
|
5194 |
+
8846
|
5195 |
+
8847
|
5196 |
+
8848
|
5197 |
+
8849
|
5198 |
+
648
|
5199 |
+
649
|
5200 |
+
8851
|
5201 |
+
8852
|
5202 |
+
8853
|
5203 |
+
8855
|
5204 |
+
8856
|
5205 |
+
8858
|
5206 |
+
8859
|
5207 |
+
8860
|
5208 |
+
8861
|
5209 |
+
8862
|
5210 |
+
8863
|
5211 |
+
8866
|
5212 |
+
8871
|
5213 |
+
8872
|
5214 |
+
8874
|
5215 |
+
8876
|
5216 |
+
8879
|
5217 |
+
8881
|
5218 |
+
8882
|
5219 |
+
8886
|
5220 |
+
8888
|
5221 |
+
650
|
5222 |
+
8889
|
5223 |
+
8890
|
5224 |
+
8891
|
5225 |
+
651
|
5226 |
+
8895
|
5227 |
+
8896
|
5228 |
+
8897
|
5229 |
+
652
|
5230 |
+
8900
|
5231 |
+
653
|
5232 |
+
8902
|
5233 |
+
8903
|
5234 |
+
8906
|
5235 |
+
8907
|
5236 |
+
8908
|
5237 |
+
8910
|
5238 |
+
8911
|
5239 |
+
8914
|
5240 |
+
8915
|
5241 |
+
8916
|
5242 |
+
8917
|
5243 |
+
8918
|
5244 |
+
8919
|
5245 |
+
8923
|
5246 |
+
8924
|
5247 |
+
654
|
5248 |
+
8926
|
5249 |
+
8928
|
5250 |
+
655
|
5251 |
+
656
|
5252 |
+
8931
|
5253 |
+
8932
|
5254 |
+
8933
|
5255 |
+
8934
|
5256 |
+
8936
|
5257 |
+
657
|
5258 |
+
8938
|
5259 |
+
8939
|
5260 |
+
658
|
5261 |
+
8940
|
5262 |
+
8941
|
5263 |
+
659
|
5264 |
+
8942
|
5265 |
+
8943
|
5266 |
+
8944
|
5267 |
+
660
|
5268 |
+
8946
|
5269 |
+
661
|
5270 |
+
662
|
5271 |
+
8950
|
5272 |
+
8952
|
5273 |
+
663
|
5274 |
+
8961
|
5275 |
+
8962
|
5276 |
+
664
|
5277 |
+
8963
|
5278 |
+
8964
|
5279 |
+
8967
|
5280 |
+
8971
|
5281 |
+
8973
|
5282 |
+
665
|
5283 |
+
8975
|
5284 |
+
8977
|
5285 |
+
8979
|
5286 |
+
8980
|
5287 |
+
8981
|
5288 |
+
8982
|
5289 |
+
666
|
5290 |
+
667
|
5291 |
+
8983
|
5292 |
+
8984
|
5293 |
+
668
|
5294 |
+
669
|
5295 |
+
8985
|
5296 |
+
8986
|
5297 |
+
8987
|
5298 |
+
8988
|
5299 |
+
8992
|
5300 |
+
8993
|
5301 |
+
8994
|
5302 |
+
8995
|
5303 |
+
8996
|
5304 |
+
670
|
5305 |
+
8997
|
5306 |
+
8998
|
5307 |
+
8999
|
5308 |
+
9000
|
5309 |
+
671
|
5310 |
+
672
|
5311 |
+
673
|
5312 |
+
9001
|
5313 |
+
674
|
5314 |
+
9002
|
5315 |
+
9003
|
5316 |
+
9004
|
5317 |
+
9005
|
5318 |
+
9006
|
5319 |
+
9007
|
5320 |
+
675
|
5321 |
+
9009
|
5322 |
+
9010
|
5323 |
+
9014
|
5324 |
+
9015
|
5325 |
+
9016
|
5326 |
+
9017
|
5327 |
+
9018
|
5328 |
+
9019
|
5329 |
+
9026
|
5330 |
+
9031
|
5331 |
+
9032
|
5332 |
+
9033
|
5333 |
+
9034
|
5334 |
+
9035
|
5335 |
+
9036
|
5336 |
+
9037
|
5337 |
+
9039
|
5338 |
+
9040
|
5339 |
+
676
|
5340 |
+
677
|
5341 |
+
9046
|
5342 |
+
9047
|
5343 |
+
9050
|
5344 |
+
9054
|
5345 |
+
9055
|
5346 |
+
9059
|
5347 |
+
9060
|
5348 |
+
9061
|
5349 |
+
9062
|
5350 |
+
9063
|
5351 |
+
9067
|
5352 |
+
9068
|
5353 |
+
678
|
5354 |
+
9073
|
5355 |
+
679
|
5356 |
+
9074
|
5357 |
+
9076
|
5358 |
+
9077
|
5359 |
+
9078
|
5360 |
+
9079
|
5361 |
+
9081
|
5362 |
+
9082
|
5363 |
+
9083
|
5364 |
+
9086
|
5365 |
+
9088
|
5366 |
+
9090
|
5367 |
+
9091
|
5368 |
+
9092
|
5369 |
+
9093
|
5370 |
+
9095
|
5371 |
+
9096
|
5372 |
+
9100
|
5373 |
+
9101
|
5374 |
+
9103
|
5375 |
+
9104
|
5376 |
+
9105
|
5377 |
+
9106
|
5378 |
+
9107
|
5379 |
+
9108
|
5380 |
+
9113
|
5381 |
+
9115
|
5382 |
+
9117
|
5383 |
+
9118
|
5384 |
+
680
|
5385 |
+
9122
|
5386 |
+
9123
|
5387 |
+
9125
|
5388 |
+
9126
|
5389 |
+
9129
|
5390 |
+
9131
|
5391 |
+
9133
|
5392 |
+
681
|
5393 |
+
9135
|
5394 |
+
9138
|
5395 |
+
9141
|
5396 |
+
9142
|
5397 |
+
9144
|
5398 |
+
9145
|
5399 |
+
9146
|
5400 |
+
9147
|
5401 |
+
9148
|
5402 |
+
682
|
5403 |
+
9150
|
5404 |
+
683
|
5405 |
+
9154
|
5406 |
+
9155
|
5407 |
+
684
|
5408 |
+
9158
|
5409 |
+
685
|
5410 |
+
9160
|
5411 |
+
9161
|
5412 |
+
9162
|
5413 |
+
9163
|
5414 |
+
9165
|
5415 |
+
9168
|
5416 |
+
9170
|
5417 |
+
9171
|
5418 |
+
686
|
5419 |
+
9173
|
5420 |
+
9174
|
5421 |
+
9176
|
5422 |
+
9177
|
5423 |
+
9180
|
5424 |
+
9182
|
5425 |
+
9183
|
5426 |
+
9184
|
5427 |
+
9185
|
5428 |
+
9188
|
5429 |
+
9189
|
5430 |
+
9191
|
5431 |
+
9192
|
5432 |
+
9196
|
5433 |
+
9197
|
5434 |
+
9199
|
5435 |
+
9200
|
5436 |
+
9201
|
5437 |
+
9203
|
5438 |
+
9204
|
5439 |
+
9205
|
5440 |
+
9207
|
5441 |
+
9209
|
5442 |
+
687
|
5443 |
+
9210
|
5444 |
+
9211
|
5445 |
+
9212
|
5446 |
+
9213
|
5447 |
+
9214
|
5448 |
+
9215
|
5449 |
+
9217
|
5450 |
+
9218
|
5451 |
+
9220
|
5452 |
+
9222
|
5453 |
+
9226
|
5454 |
+
9227
|
5455 |
+
688
|
5456 |
+
9228
|
5457 |
+
9229
|
5458 |
+
9230
|
5459 |
+
9233
|
5460 |
+
9234
|
5461 |
+
9235
|
5462 |
+
9236
|
5463 |
+
9237
|
5464 |
+
9238
|
5465 |
+
9240
|
5466 |
+
9242
|
5467 |
+
9243
|
5468 |
+
9245
|
5469 |
+
9247
|
5470 |
+
9248
|
5471 |
+
9249
|
5472 |
+
9250
|
5473 |
+
9253
|
5474 |
+
9254
|
5475 |
+
9255
|
5476 |
+
9256
|
5477 |
+
9258
|
5478 |
+
9259
|
5479 |
+
9261
|
5480 |
+
689
|
5481 |
+
690
|
5482 |
+
9265
|
5483 |
+
9266
|
5484 |
+
691
|
5485 |
+
9270
|
5486 |
+
9273
|
5487 |
+
9274
|
5488 |
+
9275
|
5489 |
+
9276
|
5490 |
+
9277
|
5491 |
+
692
|
5492 |
+
9278
|
5493 |
+
693
|
5494 |
+
9283
|
5495 |
+
9286
|
5496 |
+
694
|
5497 |
+
9287
|
5498 |
+
695
|
5499 |
+
9288
|
5500 |
+
9289
|
5501 |
+
9290
|
5502 |
+
9291
|
5503 |
+
696
|
5504 |
+
9293
|
5505 |
+
697
|
5506 |
+
9294
|
5507 |
+
698
|
5508 |
+
9295
|
5509 |
+
9296
|
5510 |
+
9301
|
5511 |
+
9302
|
5512 |
+
9304
|
5513 |
+
9307
|
5514 |
+
9308
|
5515 |
+
9312
|
5516 |
+
9313
|
5517 |
+
9314
|
5518 |
+
9315
|
5519 |
+
9316
|
5520 |
+
699
|
5521 |
+
9322
|
5522 |
+
9323
|
5523 |
+
9324
|
5524 |
+
9325
|
5525 |
+
9326
|
5526 |
+
9328
|
5527 |
+
9329
|
5528 |
+
9330
|
5529 |
+
9331
|
5530 |
+
9332
|
5531 |
+
9333
|
5532 |
+
9334
|
5533 |
+
9336
|
5534 |
+
9337
|
5535 |
+
700
|
5536 |
+
701
|
5537 |
+
702
|
5538 |
+
9344
|
5539 |
+
9346
|
5540 |
+
9347
|
5541 |
+
9348
|
5542 |
+
9349
|
5543 |
+
9351
|
5544 |
+
9352
|
5545 |
+
703
|
5546 |
+
704
|
5547 |
+
9353
|
5548 |
+
9354
|
5549 |
+
9355
|
5550 |
+
9356
|
5551 |
+
9357
|
5552 |
+
9359
|
5553 |
+
9360
|
5554 |
+
9361
|
5555 |
+
705
|
5556 |
+
9365
|
5557 |
+
9366
|
5558 |
+
9367
|
5559 |
+
9368
|
5560 |
+
9373
|
5561 |
+
9374
|
5562 |
+
9375
|
5563 |
+
9376
|
5564 |
+
9377
|
5565 |
+
706
|
5566 |
+
9381
|
5567 |
+
9385
|
5568 |
+
9386
|
5569 |
+
9387
|
5570 |
+
9389
|
5571 |
+
9390
|
5572 |
+
707
|
5573 |
+
9392
|
5574 |
+
9393
|
5575 |
+
9396
|
5576 |
+
9397
|
5577 |
+
708
|
5578 |
+
9398
|
5579 |
+
9399
|
5580 |
+
9400
|
5581 |
+
9401
|
5582 |
+
9402
|
5583 |
+
9403
|
5584 |
+
9407
|
5585 |
+
9409
|
5586 |
+
9410
|
5587 |
+
9414
|
5588 |
+
9419
|
5589 |
+
709
|
5590 |
+
710
|
5591 |
+
9421
|
5592 |
+
9422
|
5593 |
+
9423
|
5594 |
+
9427
|
5595 |
+
9428
|
5596 |
+
9429
|
5597 |
+
9430
|
5598 |
+
9431
|
5599 |
+
9433
|
5600 |
+
9435
|
5601 |
+
9436
|
5602 |
+
9437
|
5603 |
+
9438
|
5604 |
+
9439
|
5605 |
+
9440
|
5606 |
+
9442
|
5607 |
+
9443
|
5608 |
+
711
|
5609 |
+
9446
|
5610 |
+
9447
|
5611 |
+
9449
|
5612 |
+
9451
|
5613 |
+
9453
|
5614 |
+
9454
|
5615 |
+
9455
|
5616 |
+
9456
|
5617 |
+
9457
|
5618 |
+
712
|
5619 |
+
9460
|
5620 |
+
9461
|
5621 |
+
9462
|
5622 |
+
9463
|
5623 |
+
9466
|
5624 |
+
9467
|
5625 |
+
713
|
5626 |
+
9470
|
5627 |
+
9473
|
5628 |
+
9474
|
5629 |
+
9475
|
5630 |
+
9477
|
5631 |
+
9479
|
5632 |
+
714
|
5633 |
+
715
|
5634 |
+
716
|
5635 |
+
717
|
5636 |
+
9487
|
5637 |
+
9488
|
5638 |
+
718
|
5639 |
+
9493
|
5640 |
+
9494
|
5641 |
+
9495
|
5642 |
+
9496
|
5643 |
+
9497
|
5644 |
+
9499
|
5645 |
+
9500
|
5646 |
+
719
|
5647 |
+
9503
|
5648 |
+
720
|
5649 |
+
9504
|
5650 |
+
9505
|
5651 |
+
9506
|
5652 |
+
721
|
5653 |
+
9507
|
5654 |
+
9508
|
5655 |
+
9509
|
5656 |
+
9511
|
5657 |
+
9515
|
5658 |
+
9516
|
5659 |
+
9517
|
5660 |
+
9519
|
5661 |
+
9520
|
5662 |
+
9521
|
5663 |
+
9523
|
5664 |
+
722
|
5665 |
+
9527
|
5666 |
+
9528
|
5667 |
+
9529
|
5668 |
+
9531
|
5669 |
+
9533
|
5670 |
+
9534
|
5671 |
+
9535
|
5672 |
+
723
|
5673 |
+
9540
|
5674 |
+
9541
|
5675 |
+
9544
|
5676 |
+
9545
|
5677 |
+
724
|
5678 |
+
9546
|
5679 |
+
9547
|
5680 |
+
9548
|
5681 |
+
9549
|
5682 |
+
9550
|
5683 |
+
9551
|
5684 |
+
725
|
5685 |
+
9554
|
5686 |
+
9556
|
5687 |
+
9563
|
5688 |
+
9566
|
5689 |
+
9567
|
5690 |
+
726
|
5691 |
+
9568
|
5692 |
+
9569
|
5693 |
+
727
|
5694 |
+
9574
|
5695 |
+
9575
|
5696 |
+
9576
|
5697 |
+
9577
|
5698 |
+
9578
|
5699 |
+
9579
|
5700 |
+
9580
|
5701 |
+
728
|
5702 |
+
9583
|
5703 |
+
9584
|
5704 |
+
9587
|
5705 |
+
9588
|
5706 |
+
9589
|
5707 |
+
729
|
5708 |
+
9593
|
5709 |
+
9596
|
5710 |
+
9599
|
5711 |
+
9600
|
5712 |
+
9602
|
5713 |
+
9603
|
5714 |
+
9604
|
5715 |
+
9605
|
5716 |
+
9606
|
5717 |
+
9607
|
5718 |
+
9611
|
5719 |
+
9612
|
5720 |
+
9613
|
5721 |
+
730
|
5722 |
+
9614
|
5723 |
+
9615
|
5724 |
+
9617
|
5725 |
+
9618
|
5726 |
+
731
|
5727 |
+
9621
|
5728 |
+
9622
|
5729 |
+
9629
|
5730 |
+
9630
|
5731 |
+
9632
|
5732 |
+
9633
|
5733 |
+
9634
|
5734 |
+
9635
|
5735 |
+
9636
|
5736 |
+
9637
|
5737 |
+
9638
|
5738 |
+
9639
|
5739 |
+
732
|
5740 |
+
733
|
5741 |
+
9646
|
5742 |
+
734
|
5743 |
+
9647
|
5744 |
+
9649
|
5745 |
+
9650
|
5746 |
+
9651
|
5747 |
+
9654
|
5748 |
+
9655
|
5749 |
+
735
|
5750 |
+
9658
|
5751 |
+
9659
|
5752 |
+
9660
|
5753 |
+
9661
|
5754 |
+
9662
|
5755 |
+
9663
|
5756 |
+
736
|
5757 |
+
9664
|
5758 |
+
9666
|
5759 |
+
737
|
5760 |
+
9668
|
5761 |
+
9672
|
5762 |
+
9673
|
5763 |
+
9674
|
5764 |
+
9675
|
5765 |
+
9676
|
5766 |
+
9677
|
5767 |
+
9678
|
5768 |
+
9679
|
5769 |
+
9681
|
5770 |
+
9682
|
5771 |
+
9683
|
5772 |
+
9684
|
5773 |
+
9686
|
5774 |
+
9687
|
5775 |
+
9688
|
5776 |
+
9691
|
5777 |
+
9692
|
5778 |
+
9698
|
5779 |
+
9701
|
5780 |
+
738
|
5781 |
+
9702
|
5782 |
+
9705
|
5783 |
+
9706
|
5784 |
+
9707
|
5785 |
+
9708
|
5786 |
+
739
|
5787 |
+
9709
|
5788 |
+
9711
|
5789 |
+
9712
|
5790 |
+
9713
|
5791 |
+
9714
|
5792 |
+
9716
|
5793 |
+
9719
|
5794 |
+
9722
|
5795 |
+
740
|
5796 |
+
9723
|
5797 |
+
9725
|
5798 |
+
9727
|
5799 |
+
9728
|
5800 |
+
9729
|
5801 |
+
741
|
5802 |
+
9733
|
5803 |
+
9736
|
5804 |
+
9738
|
5805 |
+
9739
|
5806 |
+
9741
|
5807 |
+
9743
|
5808 |
+
9745
|
5809 |
+
9747
|
5810 |
+
9749
|
5811 |
+
9750
|
5812 |
+
9753
|
5813 |
+
9755
|
5814 |
+
9757
|
5815 |
+
9758
|
5816 |
+
742
|
5817 |
+
9759
|
5818 |
+
9760
|
5819 |
+
743
|
5820 |
+
9768
|
5821 |
+
744
|
5822 |
+
745
|
5823 |
+
9769
|
5824 |
+
9774
|
5825 |
+
9775
|
5826 |
+
9776
|
5827 |
+
9781
|
5828 |
+
9785
|
5829 |
+
9786
|
5830 |
+
9788
|
5831 |
+
9789
|
5832 |
+
9790
|
5833 |
+
9793
|
5834 |
+
9795
|
5835 |
+
9796
|
5836 |
+
746
|
5837 |
+
9798
|
5838 |
+
9800
|
5839 |
+
9801
|
5840 |
+
9803
|
5841 |
+
9804
|
5842 |
+
9809
|
5843 |
+
9810
|
5844 |
+
9818
|
5845 |
+
747
|
5846 |
+
9820
|
5847 |
+
9821
|
5848 |
+
9822
|
5849 |
+
9823
|
5850 |
+
9825
|
5851 |
+
9826
|
5852 |
+
748
|
5853 |
+
9827
|
5854 |
+
9828
|
5855 |
+
9829
|
5856 |
+
9831
|
5857 |
+
9834
|
5858 |
+
9835
|
5859 |
+
9836
|
5860 |
+
9837
|
5861 |
+
9838
|
5862 |
+
9840
|
5863 |
+
9841
|
5864 |
+
9845
|
5865 |
+
9847
|
5866 |
+
9852
|
5867 |
+
9855
|
5868 |
+
749
|
5869 |
+
750
|
5870 |
+
9858
|
5871 |
+
9863
|
5872 |
+
9864
|
5873 |
+
9866
|
5874 |
+
9870
|
5875 |
+
751
|
5876 |
+
9873
|
5877 |
+
9874
|
5878 |
+
9875
|
5879 |
+
9876
|
5880 |
+
9877
|
5881 |
+
752
|
5882 |
+
9880
|
5883 |
+
9881
|
5884 |
+
9882
|
5885 |
+
9883
|
5886 |
+
753
|
5887 |
+
9885
|
5888 |
+
9886
|
5889 |
+
9887
|
5890 |
+
754
|
5891 |
+
9888
|
5892 |
+
9891
|
5893 |
+
9896
|
5894 |
+
9897
|
5895 |
+
9898
|
5896 |
+
9899
|
5897 |
+
755
|
5898 |
+
9902
|
5899 |
+
9903
|
5900 |
+
9904
|
5901 |
+
9907
|
5902 |
+
9908
|
5903 |
+
9909
|
5904 |
+
9910
|
5905 |
+
9911
|
5906 |
+
9913
|
5907 |
+
9916
|
5908 |
+
756
|
5909 |
+
9917
|
5910 |
+
9918
|
5911 |
+
9919
|
5912 |
+
9920
|
5913 |
+
9921
|
5914 |
+
9923
|
5915 |
+
9926
|
5916 |
+
9928
|
5917 |
+
9931
|
5918 |
+
9932
|
5919 |
+
9933
|
5920 |
+
9934
|
5921 |
+
9935
|
5922 |
+
9937
|
5923 |
+
9939
|
5924 |
+
9940
|
5925 |
+
9945
|
5926 |
+
9946
|
5927 |
+
9947
|
5928 |
+
9948
|
5929 |
+
9949
|
5930 |
+
9953
|
5931 |
+
9954
|
5932 |
+
9955
|
5933 |
+
9957
|
5934 |
+
9959
|
5935 |
+
9962
|
5936 |
+
9963
|
5937 |
+
9966
|
5938 |
+
9968
|
5939 |
+
9969
|
5940 |
+
9970
|
5941 |
+
9972
|
5942 |
+
9975
|
5943 |
+
9976
|
5944 |
+
9977
|
5945 |
+
9979
|
5946 |
+
9980
|
5947 |
+
9981
|
5948 |
+
757
|
5949 |
+
9982
|
5950 |
+
9983
|
5951 |
+
9986
|
5952 |
+
758
|
5953 |
+
9993
|
5954 |
+
9994
|
5955 |
+
9995
|
5956 |
+
9996
|
5957 |
+
9997
|
5958 |
+
9999
|
5959 |
+
759
|
5960 |
+
10002
|
5961 |
+
10003
|
5962 |
+
10004
|
5963 |
+
760
|
5964 |
+
10008
|
5965 |
+
10009
|
5966 |
+
10010
|
5967 |
+
10011
|
5968 |
+
10012
|
5969 |
+
10014
|
5970 |
+
10015
|
5971 |
+
761
|
5972 |
+
10016
|
5973 |
+
10017
|
5974 |
+
10018
|
5975 |
+
10020
|
5976 |
+
10022
|
5977 |
+
10023
|
5978 |
+
10024
|
5979 |
+
10029
|
5980 |
+
10032
|
5981 |
+
10034
|
5982 |
+
10035
|
5983 |
+
10036
|
5984 |
+
10037
|
5985 |
+
10038
|
5986 |
+
762
|
5987 |
+
10039
|
5988 |
+
10042
|
5989 |
+
10043
|
5990 |
+
10044
|
5991 |
+
10046
|
5992 |
+
10048
|
5993 |
+
10051
|
5994 |
+
763
|
5995 |
+
10056
|
5996 |
+
10061
|
5997 |
+
10062
|
5998 |
+
10063
|
5999 |
+
10070
|
6000 |
+
10071
|
6001 |
+
10072
|
6002 |
+
10073
|
6003 |
+
764
|
6004 |
+
10076
|
6005 |
+
10079
|
6006 |
+
10083
|
6007 |
+
10084
|
6008 |
+
10090
|
6009 |
+
10092
|
6010 |
+
10093
|
6011 |
+
10094
|
6012 |
+
10095
|
6013 |
+
10097
|
6014 |
+
10101
|
6015 |
+
10103
|
6016 |
+
10104
|
6017 |
+
10108
|
6018 |
+
10110
|
6019 |
+
10111
|
6020 |
+
10112
|
6021 |
+
765
|
6022 |
+
10114
|
6023 |
+
10117
|
6024 |
+
10119
|
6025 |
+
10120
|
6026 |
+
10121
|
6027 |
+
10122
|
6028 |
+
10123
|
6029 |
+
10125
|
6030 |
+
10127
|
6031 |
+
10128
|
6032 |
+
10129
|
6033 |
+
10130
|
6034 |
+
10131
|
6035 |
+
10132
|
6036 |
+
10133
|
6037 |
+
10134
|
6038 |
+
10135
|
6039 |
+
10136
|
6040 |
+
10138
|
6041 |
+
10140
|
6042 |
+
10141
|
6043 |
+
10143
|
6044 |
+
10145
|
6045 |
+
10146
|
6046 |
+
10149
|
6047 |
+
10154
|
6048 |
+
10155
|
[Condensed diff residue: this span was the flattened rendering of a diff hunk from one of the one-integer-per-line index files under `timm/data/_info/` — the gutter line numbers run from 6049 to roughly 9839, which is consistent with `imagenet22k_ms_to_12k_indices.txt`. Each added line held a single class index (e.g. 10156, 766, 10157, ..., interleaving large five-digit indices with small three-digit ones); only the line-number gutter, `+` diff markers, and `|` column separators have been reflowed away. The complete file contents are available in the repository.]
17446
|
9840 |
+
17447
|
9841 |
+
17450
|
9842 |
+
17455
|
9843 |
+
17457
|
9844 |
+
17459
|
9845 |
+
17462
|
9846 |
+
17463
|
9847 |
+
17464
|
9848 |
+
17465
|
9849 |
+
17466
|
9850 |
+
17467
|
9851 |
+
17470
|
9852 |
+
17471
|
9853 |
+
17472
|
9854 |
+
17473
|
9855 |
+
17474
|
9856 |
+
17475
|
9857 |
+
17478
|
9858 |
+
17479
|
9859 |
+
17480
|
9860 |
+
17481
|
9861 |
+
17483
|
9862 |
+
17485
|
9863 |
+
17486
|
9864 |
+
17497
|
9865 |
+
17501
|
9866 |
+
17507
|
9867 |
+
17508
|
9868 |
+
17513
|
9869 |
+
17514
|
9870 |
+
17518
|
9871 |
+
17519
|
9872 |
+
17523
|
9873 |
+
17524
|
9874 |
+
17525
|
9875 |
+
17527
|
9876 |
+
17531
|
9877 |
+
17532
|
9878 |
+
17533
|
9879 |
+
17535
|
9880 |
+
17536
|
9881 |
+
17537
|
9882 |
+
17538
|
9883 |
+
17541
|
9884 |
+
17544
|
9885 |
+
17549
|
9886 |
+
17554
|
9887 |
+
17555
|
9888 |
+
17566
|
9889 |
+
17575
|
9890 |
+
17580
|
9891 |
+
17581
|
9892 |
+
17582
|
9893 |
+
17583
|
9894 |
+
17584
|
9895 |
+
17585
|
9896 |
+
17586
|
9897 |
+
17589
|
9898 |
+
17593
|
9899 |
+
17594
|
9900 |
+
17599
|
9901 |
+
17601
|
9902 |
+
17603
|
9903 |
+
17606
|
9904 |
+
17609
|
9905 |
+
17610
|
9906 |
+
17611
|
9907 |
+
17613
|
9908 |
+
17614
|
9909 |
+
17616
|
9910 |
+
17618
|
9911 |
+
17620
|
9912 |
+
17621
|
9913 |
+
17625
|
9914 |
+
17626
|
9915 |
+
17627
|
9916 |
+
17628
|
9917 |
+
17630
|
9918 |
+
17633
|
9919 |
+
17634
|
9920 |
+
17636
|
9921 |
+
17640
|
9922 |
+
17641
|
9923 |
+
17642
|
9924 |
+
17645
|
9925 |
+
17648
|
9926 |
+
17649
|
9927 |
+
17650
|
9928 |
+
17651
|
9929 |
+
17652
|
9930 |
+
17656
|
9931 |
+
17658
|
9932 |
+
17659
|
9933 |
+
17661
|
9934 |
+
17663
|
9935 |
+
17665
|
9936 |
+
17666
|
9937 |
+
17667
|
9938 |
+
17669
|
9939 |
+
17670
|
9940 |
+
17672
|
9941 |
+
17674
|
9942 |
+
17675
|
9943 |
+
17676
|
9944 |
+
17677
|
9945 |
+
17678
|
9946 |
+
17679
|
9947 |
+
17681
|
9948 |
+
17687
|
9949 |
+
17691
|
9950 |
+
17693
|
9951 |
+
17694
|
9952 |
+
17695
|
9953 |
+
17696
|
9954 |
+
17699
|
9955 |
+
17703
|
9956 |
+
17704
|
9957 |
+
17707
|
9958 |
+
17726
|
9959 |
+
17736
|
9960 |
+
17737
|
9961 |
+
17738
|
9962 |
+
17740
|
9963 |
+
17741
|
9964 |
+
17742
|
9965 |
+
17744
|
9966 |
+
17745
|
9967 |
+
17748
|
9968 |
+
17749
|
9969 |
+
17750
|
9970 |
+
17751
|
9971 |
+
17752
|
9972 |
+
17753
|
9973 |
+
17754
|
9974 |
+
17755
|
9975 |
+
17757
|
9976 |
+
17758
|
9977 |
+
17759
|
9978 |
+
17760
|
9979 |
+
17763
|
9980 |
+
17777
|
9981 |
+
17778
|
9982 |
+
17779
|
9983 |
+
17780
|
9984 |
+
17785
|
9985 |
+
17786
|
9986 |
+
17789
|
9987 |
+
17790
|
9988 |
+
17795
|
9989 |
+
17797
|
9990 |
+
17798
|
9991 |
+
17799
|
9992 |
+
17801
|
9993 |
+
17802
|
9994 |
+
17804
|
9995 |
+
17807
|
9996 |
+
17808
|
9997 |
+
17809
|
9998 |
+
17811
|
9999 |
+
17813
|
10000 |
+
17814
|
10001 |
+
17818
|
10002 |
+
17819
|
10003 |
+
17821
|
10004 |
+
17826
|
10005 |
+
17827
|
10006 |
+
17828
|
10007 |
+
17829
|
10008 |
+
17830
|
10009 |
+
17832
|
10010 |
+
17833
|
10011 |
+
17835
|
10012 |
+
17836
|
10013 |
+
17841
|
10014 |
+
17850
|
10015 |
+
17854
|
10016 |
+
17856
|
10017 |
+
17857
|
10018 |
+
17860
|
10019 |
+
17861
|
10020 |
+
17862
|
10021 |
+
17865
|
10022 |
+
17867
|
10023 |
+
17868
|
10024 |
+
17870
|
10025 |
+
17871
|
10026 |
+
17872
|
10027 |
+
17873
|
10028 |
+
17874
|
10029 |
+
17875
|
10030 |
+
17876
|
10031 |
+
17877
|
10032 |
+
17878
|
10033 |
+
17879
|
10034 |
+
17881
|
10035 |
+
17883
|
10036 |
+
17884
|
10037 |
+
17885
|
10038 |
+
17886
|
10039 |
+
17887
|
10040 |
+
17888
|
10041 |
+
17889
|
10042 |
+
17891
|
10043 |
+
17892
|
10044 |
+
17893
|
10045 |
+
17894
|
10046 |
+
17895
|
10047 |
+
17896
|
10048 |
+
17897
|
10049 |
+
17900
|
10050 |
+
17901
|
10051 |
+
17903
|
10052 |
+
17904
|
10053 |
+
17905
|
10054 |
+
17906
|
10055 |
+
17910
|
10056 |
+
17912
|
10057 |
+
17915
|
10058 |
+
17917
|
10059 |
+
17920
|
10060 |
+
17929
|
10061 |
+
17933
|
10062 |
+
17934
|
10063 |
+
17940
|
10064 |
+
17944
|
10065 |
+
17949
|
10066 |
+
17951
|
10067 |
+
17953
|
10068 |
+
17954
|
10069 |
+
17955
|
10070 |
+
17961
|
10071 |
+
17962
|
10072 |
+
17963
|
10073 |
+
17964
|
10074 |
+
17965
|
10075 |
+
17968
|
10076 |
+
17969
|
10077 |
+
17970
|
10078 |
+
17973
|
10079 |
+
17974
|
10080 |
+
17975
|
10081 |
+
17976
|
10082 |
+
17978
|
10083 |
+
17979
|
10084 |
+
17980
|
10085 |
+
17981
|
10086 |
+
17985
|
10087 |
+
17986
|
10088 |
+
17987
|
10089 |
+
17988
|
10090 |
+
17990
|
10091 |
+
17991
|
10092 |
+
17994
|
10093 |
+
17995
|
10094 |
+
17998
|
10095 |
+
17999
|
10096 |
+
18000
|
10097 |
+
18002
|
10098 |
+
18005
|
10099 |
+
18006
|
10100 |
+
18007
|
10101 |
+
18014
|
10102 |
+
18015
|
10103 |
+
18020
|
10104 |
+
18022
|
10105 |
+
18023
|
10106 |
+
18027
|
10107 |
+
18032
|
10108 |
+
18035
|
10109 |
+
18036
|
10110 |
+
18037
|
10111 |
+
18038
|
10112 |
+
18039
|
10113 |
+
18040
|
10114 |
+
18041
|
10115 |
+
18042
|
10116 |
+
18043
|
10117 |
+
18045
|
10118 |
+
18047
|
10119 |
+
18050
|
10120 |
+
18051
|
10121 |
+
984
|
10122 |
+
18052
|
10123 |
+
18053
|
10124 |
+
18055
|
10125 |
+
18059
|
10126 |
+
18064
|
10127 |
+
18065
|
10128 |
+
18071
|
10129 |
+
18073
|
10130 |
+
18075
|
10131 |
+
18077
|
10132 |
+
18078
|
10133 |
+
18080
|
10134 |
+
18081
|
10135 |
+
18082
|
10136 |
+
18084
|
10137 |
+
18085
|
10138 |
+
18086
|
10139 |
+
18087
|
10140 |
+
18091
|
10141 |
+
18092
|
10142 |
+
18095
|
10143 |
+
18099
|
10144 |
+
18100
|
10145 |
+
18102
|
10146 |
+
18103
|
10147 |
+
18104
|
10148 |
+
18105
|
10149 |
+
18106
|
10150 |
+
18107
|
10151 |
+
18108
|
10152 |
+
18110
|
10153 |
+
18111
|
10154 |
+
18113
|
10155 |
+
18114
|
10156 |
+
18117
|
10157 |
+
18118
|
10158 |
+
18120
|
10159 |
+
18121
|
10160 |
+
18122
|
10161 |
+
18124
|
10162 |
+
18125
|
10163 |
+
18127
|
10164 |
+
18128
|
10165 |
+
18129
|
10166 |
+
18130
|
10167 |
+
18131
|
10168 |
+
18134
|
10169 |
+
18135
|
10170 |
+
18137
|
10171 |
+
18139
|
10172 |
+
18140
|
10173 |
+
18144
|
10174 |
+
18150
|
10175 |
+
18151
|
10176 |
+
18152
|
10177 |
+
18154
|
10178 |
+
18155
|
10179 |
+
18156
|
10180 |
+
18158
|
10181 |
+
18159
|
10182 |
+
18160
|
10183 |
+
18162
|
10184 |
+
18164
|
10185 |
+
18165
|
10186 |
+
18166
|
10187 |
+
18174
|
10188 |
+
18175
|
10189 |
+
18181
|
10190 |
+
18182
|
10191 |
+
18187
|
10192 |
+
18188
|
10193 |
+
18191
|
10194 |
+
18192
|
10195 |
+
18218
|
10196 |
+
985
|
10197 |
+
18219
|
10198 |
+
18220
|
10199 |
+
18222
|
10200 |
+
18226
|
10201 |
+
18229
|
10202 |
+
18230
|
10203 |
+
18231
|
10204 |
+
18232
|
10205 |
+
18234
|
10206 |
+
18235
|
10207 |
+
18236
|
10208 |
+
18237
|
10209 |
+
18240
|
10210 |
+
18241
|
10211 |
+
18243
|
10212 |
+
18244
|
10213 |
+
18246
|
10214 |
+
18248
|
10215 |
+
18249
|
10216 |
+
18250
|
10217 |
+
18252
|
10218 |
+
18253
|
10219 |
+
18254
|
10220 |
+
18256
|
10221 |
+
18259
|
10222 |
+
18260
|
10223 |
+
18262
|
10224 |
+
18263
|
10225 |
+
18264
|
10226 |
+
18265
|
10227 |
+
18267
|
10228 |
+
18270
|
10229 |
+
18272
|
10230 |
+
18273
|
10231 |
+
18274
|
10232 |
+
18277
|
10233 |
+
18278
|
10234 |
+
18282
|
10235 |
+
18283
|
10236 |
+
18284
|
10237 |
+
18286
|
10238 |
+
18287
|
10239 |
+
18288
|
10240 |
+
18289
|
10241 |
+
18290
|
10242 |
+
18293
|
10243 |
+
18296
|
10244 |
+
18297
|
10245 |
+
18299
|
10246 |
+
18300
|
10247 |
+
18302
|
10248 |
+
18303
|
10249 |
+
18309
|
10250 |
+
18310
|
10251 |
+
18311
|
10252 |
+
18312
|
10253 |
+
18316
|
10254 |
+
18317
|
10255 |
+
18318
|
10256 |
+
18319
|
10257 |
+
18320
|
10258 |
+
18321
|
10259 |
+
18322
|
10260 |
+
18324
|
10261 |
+
18335
|
10262 |
+
18338
|
10263 |
+
18339
|
10264 |
+
18340
|
10265 |
+
18341
|
10266 |
+
18344
|
10267 |
+
18345
|
10268 |
+
18346
|
10269 |
+
18347
|
10270 |
+
18348
|
10271 |
+
18349
|
10272 |
+
18351
|
10273 |
+
18356
|
10274 |
+
18357
|
10275 |
+
18362
|
10276 |
+
18364
|
10277 |
+
18368
|
10278 |
+
18370
|
10279 |
+
18371
|
10280 |
+
18372
|
10281 |
+
18373
|
10282 |
+
18374
|
10283 |
+
18377
|
10284 |
+
18381
|
10285 |
+
18386
|
10286 |
+
18394
|
10287 |
+
18399
|
10288 |
+
18401
|
10289 |
+
18404
|
10290 |
+
18405
|
10291 |
+
18407
|
10292 |
+
18409
|
10293 |
+
18411
|
10294 |
+
18412
|
10295 |
+
18413
|
10296 |
+
18414
|
10297 |
+
18416
|
10298 |
+
18417
|
10299 |
+
18418
|
10300 |
+
18420
|
10301 |
+
18425
|
10302 |
+
18426
|
10303 |
+
18432
|
10304 |
+
18435
|
10305 |
+
18436
|
10306 |
+
18437
|
10307 |
+
18439
|
10308 |
+
18454
|
10309 |
+
18455
|
10310 |
+
18456
|
10311 |
+
18457
|
10312 |
+
18458
|
10313 |
+
18459
|
10314 |
+
18460
|
10315 |
+
18461
|
10316 |
+
18464
|
10317 |
+
18465
|
10318 |
+
18466
|
10319 |
+
18468
|
10320 |
+
18469
|
10321 |
+
18472
|
10322 |
+
18473
|
10323 |
+
18474
|
10324 |
+
18475
|
10325 |
+
18476
|
10326 |
+
18479
|
10327 |
+
18482
|
10328 |
+
18483
|
10329 |
+
18486
|
10330 |
+
18491
|
10331 |
+
18492
|
10332 |
+
18493
|
10333 |
+
18495
|
10334 |
+
18499
|
10335 |
+
18500
|
10336 |
+
18501
|
10337 |
+
18502
|
10338 |
+
18504
|
10339 |
+
18505
|
10340 |
+
18507
|
10341 |
+
18508
|
10342 |
+
18509
|
10343 |
+
18510
|
10344 |
+
18511
|
10345 |
+
18512
|
10346 |
+
18514
|
10347 |
+
18519
|
10348 |
+
18520
|
10349 |
+
18521
|
10350 |
+
18522
|
10351 |
+
18523
|
10352 |
+
18524
|
10353 |
+
18525
|
10354 |
+
18527
|
10355 |
+
18528
|
10356 |
+
18529
|
10357 |
+
18530
|
10358 |
+
18531
|
10359 |
+
18535
|
10360 |
+
18536
|
10361 |
+
18537
|
10362 |
+
18538
|
10363 |
+
18542
|
10364 |
+
18544
|
10365 |
+
18545
|
10366 |
+
18546
|
10367 |
+
986
|
10368 |
+
18548
|
10369 |
+
18549
|
10370 |
+
18551
|
10371 |
+
18552
|
10372 |
+
18553
|
10373 |
+
18554
|
10374 |
+
18555
|
10375 |
+
18558
|
10376 |
+
18559
|
10377 |
+
18561
|
10378 |
+
18562
|
10379 |
+
18563
|
10380 |
+
18565
|
10381 |
+
18566
|
10382 |
+
18567
|
10383 |
+
18568
|
10384 |
+
18570
|
10385 |
+
18571
|
10386 |
+
18572
|
10387 |
+
18574
|
10388 |
+
18577
|
10389 |
+
18582
|
10390 |
+
18585
|
10391 |
+
18586
|
10392 |
+
18587
|
10393 |
+
18588
|
10394 |
+
18589
|
10395 |
+
18590
|
10396 |
+
18593
|
10397 |
+
18595
|
10398 |
+
18597
|
10399 |
+
18598
|
10400 |
+
18599
|
10401 |
+
18600
|
10402 |
+
18601
|
10403 |
+
18602
|
10404 |
+
18603
|
10405 |
+
18604
|
10406 |
+
18605
|
10407 |
+
18606
|
10408 |
+
18607
|
10409 |
+
18609
|
10410 |
+
18610
|
10411 |
+
18613
|
10412 |
+
18615
|
10413 |
+
18616
|
10414 |
+
18618
|
10415 |
+
18619
|
10416 |
+
18622
|
10417 |
+
18623
|
10418 |
+
18624
|
10419 |
+
18625
|
10420 |
+
18627
|
10421 |
+
18628
|
10422 |
+
18629
|
10423 |
+
18630
|
10424 |
+
18631
|
10425 |
+
18632
|
10426 |
+
18633
|
10427 |
+
18634
|
10428 |
+
18635
|
10429 |
+
18636
|
10430 |
+
18643
|
10431 |
+
18644
|
10432 |
+
18645
|
10433 |
+
18646
|
10434 |
+
18648
|
10435 |
+
18649
|
10436 |
+
18650
|
10437 |
+
18653
|
10438 |
+
18654
|
10439 |
+
18658
|
10440 |
+
18659
|
10441 |
+
18663
|
10442 |
+
18669
|
10443 |
+
18671
|
10444 |
+
18672
|
10445 |
+
18674
|
10446 |
+
18676
|
10447 |
+
18677
|
10448 |
+
18681
|
10449 |
+
18682
|
10450 |
+
18683
|
10451 |
+
18684
|
10452 |
+
18692
|
10453 |
+
18693
|
10454 |
+
18696
|
10455 |
+
18697
|
10456 |
+
18699
|
10457 |
+
18703
|
10458 |
+
18706
|
10459 |
+
18708
|
10460 |
+
18709
|
10461 |
+
18710
|
10462 |
+
18715
|
10463 |
+
18717
|
10464 |
+
18727
|
10465 |
+
18735
|
10466 |
+
18737
|
10467 |
+
18740
|
10468 |
+
18742
|
10469 |
+
18743
|
10470 |
+
18747
|
10471 |
+
18750
|
10472 |
+
18752
|
10473 |
+
18753
|
10474 |
+
18756
|
10475 |
+
18765
|
10476 |
+
18766
|
10477 |
+
18767
|
10478 |
+
18771
|
10479 |
+
18775
|
10480 |
+
18778
|
10481 |
+
18779
|
10482 |
+
18787
|
10483 |
+
18792
|
10484 |
+
18793
|
10485 |
+
18794
|
10486 |
+
18796
|
10487 |
+
18803
|
10488 |
+
987
|
10489 |
+
18806
|
10490 |
+
18807
|
10491 |
+
18809
|
10492 |
+
18812
|
10493 |
+
18813
|
10494 |
+
18816
|
10495 |
+
18819
|
10496 |
+
18820
|
10497 |
+
18827
|
10498 |
+
18828
|
10499 |
+
18833
|
10500 |
+
18836
|
10501 |
+
18837
|
10502 |
+
18839
|
10503 |
+
18840
|
10504 |
+
18845
|
10505 |
+
18846
|
10506 |
+
18847
|
10507 |
+
18848
|
10508 |
+
18852
|
10509 |
+
18853
|
10510 |
+
18854
|
10511 |
+
18858
|
10512 |
+
18859
|
10513 |
+
18860
|
10514 |
+
18861
|
10515 |
+
18863
|
10516 |
+
18864
|
10517 |
+
18865
|
10518 |
+
18866
|
10519 |
+
18868
|
10520 |
+
18869
|
10521 |
+
18870
|
10522 |
+
18871
|
10523 |
+
18874
|
10524 |
+
18875
|
10525 |
+
18876
|
10526 |
+
18877
|
10527 |
+
18878
|
10528 |
+
18879
|
10529 |
+
18880
|
10530 |
+
18881
|
10531 |
+
18882
|
10532 |
+
18883
|
10533 |
+
18884
|
10534 |
+
18885
|
10535 |
+
18896
|
10536 |
+
18897
|
10537 |
+
18899
|
10538 |
+
18901
|
10539 |
+
18911
|
10540 |
+
18913
|
10541 |
+
18917
|
10542 |
+
18919
|
10543 |
+
18922
|
10544 |
+
18924
|
10545 |
+
18925
|
10546 |
+
18926
|
10547 |
+
18932
|
10548 |
+
18935
|
10549 |
+
18938
|
10550 |
+
18939
|
10551 |
+
18940
|
10552 |
+
18944
|
10553 |
+
18948
|
10554 |
+
18950
|
10555 |
+
18953
|
10556 |
+
18954
|
10557 |
+
18956
|
10558 |
+
18957
|
10559 |
+
18959
|
10560 |
+
18960
|
10561 |
+
18961
|
10562 |
+
18964
|
10563 |
+
18965
|
10564 |
+
18966
|
10565 |
+
18968
|
10566 |
+
18973
|
10567 |
+
18990
|
10568 |
+
18991
|
10569 |
+
18992
|
10570 |
+
18995
|
10571 |
+
18997
|
10572 |
+
18999
|
10573 |
+
19000
|
10574 |
+
19007
|
10575 |
+
19023
|
10576 |
+
19024
|
10577 |
+
19027
|
10578 |
+
19032
|
10579 |
+
19038
|
10580 |
+
19040
|
10581 |
+
19042
|
10582 |
+
19044
|
10583 |
+
19045
|
10584 |
+
19046
|
10585 |
+
19050
|
10586 |
+
19063
|
10587 |
+
19067
|
10588 |
+
19080
|
10589 |
+
19081
|
10590 |
+
19084
|
10591 |
+
19086
|
10592 |
+
19088
|
10593 |
+
19089
|
10594 |
+
19090
|
10595 |
+
19091
|
10596 |
+
19093
|
10597 |
+
19094
|
10598 |
+
19097
|
10599 |
+
19098
|
10600 |
+
19099
|
10601 |
+
19103
|
10602 |
+
19104
|
10603 |
+
19107
|
10604 |
+
19111
|
10605 |
+
988
|
10606 |
+
19114
|
10607 |
+
19115
|
10608 |
+
19116
|
10609 |
+
19118
|
10610 |
+
19121
|
10611 |
+
19122
|
10612 |
+
19126
|
10613 |
+
19127
|
10614 |
+
19130
|
10615 |
+
19131
|
10616 |
+
19132
|
10617 |
+
19134
|
10618 |
+
19135
|
10619 |
+
19137
|
10620 |
+
19139
|
10621 |
+
19140
|
10622 |
+
19143
|
10623 |
+
19144
|
10624 |
+
19145
|
10625 |
+
19147
|
10626 |
+
19148
|
10627 |
+
19149
|
10628 |
+
19150
|
10629 |
+
19152
|
10630 |
+
19153
|
10631 |
+
19154
|
10632 |
+
19155
|
10633 |
+
19156
|
10634 |
+
19157
|
10635 |
+
19160
|
10636 |
+
19161
|
10637 |
+
19162
|
10638 |
+
19164
|
10639 |
+
19165
|
10640 |
+
19166
|
10641 |
+
19167
|
10642 |
+
19168
|
10643 |
+
19169
|
10644 |
+
19170
|
10645 |
+
19171
|
10646 |
+
19173
|
10647 |
+
19175
|
10648 |
+
19176
|
10649 |
+
19179
|
10650 |
+
19182
|
10651 |
+
19185
|
10652 |
+
19186
|
10653 |
+
19189
|
10654 |
+
19190
|
10655 |
+
19191
|
10656 |
+
19193
|
10657 |
+
19195
|
10658 |
+
19197
|
10659 |
+
19200
|
10660 |
+
19203
|
10661 |
+
19204
|
10662 |
+
19205
|
10663 |
+
19207
|
10664 |
+
19208
|
10665 |
+
19209
|
10666 |
+
19211
|
10667 |
+
19212
|
10668 |
+
19214
|
10669 |
+
19223
|
10670 |
+
19224
|
10671 |
+
19225
|
10672 |
+
19228
|
10673 |
+
19229
|
10674 |
+
19230
|
10675 |
+
19231
|
10676 |
+
19232
|
10677 |
+
19233
|
10678 |
+
19236
|
10679 |
+
19237
|
10680 |
+
19238
|
10681 |
+
19239
|
10682 |
+
19240
|
10683 |
+
19242
|
10684 |
+
19243
|
10685 |
+
19244
|
10686 |
+
19246
|
10687 |
+
19248
|
10688 |
+
19249
|
10689 |
+
19250
|
10690 |
+
19254
|
10691 |
+
19262
|
10692 |
+
19264
|
10693 |
+
19267
|
10694 |
+
19268
|
10695 |
+
19269
|
10696 |
+
19270
|
10697 |
+
19271
|
10698 |
+
19272
|
10699 |
+
19274
|
10700 |
+
19275
|
10701 |
+
19276
|
10702 |
+
19277
|
10703 |
+
19280
|
10704 |
+
19281
|
10705 |
+
19285
|
10706 |
+
19286
|
10707 |
+
19287
|
10708 |
+
19294
|
10709 |
+
19299
|
10710 |
+
19300
|
10711 |
+
19302
|
10712 |
+
19303
|
10713 |
+
19305
|
10714 |
+
19306
|
10715 |
+
19307
|
10716 |
+
19310
|
10717 |
+
19312
|
10718 |
+
19314
|
10719 |
+
19315
|
10720 |
+
19316
|
10721 |
+
19317
|
10722 |
+
19320
|
10723 |
+
19321
|
10724 |
+
19326
|
10725 |
+
19327
|
10726 |
+
19328
|
10727 |
+
19330
|
10728 |
+
19331
|
10729 |
+
19334
|
10730 |
+
19335
|
10731 |
+
19340
|
10732 |
+
19341
|
10733 |
+
19342
|
10734 |
+
19343
|
10735 |
+
19346
|
10736 |
+
19347
|
10737 |
+
19349
|
10738 |
+
19352
|
10739 |
+
19353
|
10740 |
+
19354
|
10741 |
+
19355
|
10742 |
+
19357
|
10743 |
+
19358
|
10744 |
+
19359
|
10745 |
+
19360
|
10746 |
+
19361
|
10747 |
+
19362
|
10748 |
+
19364
|
10749 |
+
19366
|
10750 |
+
19370
|
10751 |
+
19373
|
10752 |
+
19374
|
10753 |
+
19375
|
10754 |
+
19376
|
10755 |
+
19377
|
10756 |
+
19379
|
10757 |
+
19381
|
10758 |
+
19383
|
10759 |
+
19384
|
10760 |
+
19387
|
10761 |
+
19389
|
10762 |
+
19391
|
10763 |
+
19392
|
10764 |
+
19395
|
10765 |
+
19396
|
10766 |
+
19398
|
10767 |
+
19412
|
10768 |
+
19413
|
10769 |
+
19414
|
10770 |
+
19415
|
10771 |
+
19417
|
10772 |
+
19424
|
10773 |
+
19427
|
10774 |
+
19429
|
10775 |
+
19431
|
10776 |
+
19433
|
10777 |
+
19444
|
10778 |
+
19445
|
10779 |
+
19446
|
10780 |
+
19447
|
10781 |
+
19450
|
10782 |
+
19451
|
10783 |
+
19452
|
10784 |
+
19456
|
10785 |
+
19457
|
10786 |
+
19458
|
10787 |
+
19460
|
10788 |
+
19461
|
10789 |
+
19462
|
10790 |
+
19463
|
10791 |
+
19466
|
10792 |
+
19467
|
10793 |
+
19469
|
10794 |
+
19471
|
10795 |
+
19481
|
10796 |
+
19482
|
10797 |
+
19483
|
10798 |
+
19487
|
10799 |
+
19488
|
10800 |
+
19491
|
10801 |
+
19493
|
10802 |
+
19494
|
10803 |
+
19495
|
10804 |
+
19496
|
10805 |
+
19500
|
10806 |
+
19502
|
10807 |
+
19504
|
10808 |
+
19505
|
10809 |
+
19506
|
10810 |
+
19507
|
10811 |
+
19508
|
10812 |
+
19511
|
10813 |
+
19512
|
10814 |
+
19513
|
10815 |
+
19516
|
10816 |
+
19518
|
10817 |
+
19519
|
10818 |
+
19521
|
10819 |
+
19523
|
10820 |
+
19524
|
10821 |
+
19526
|
10822 |
+
19527
|
10823 |
+
19528
|
10824 |
+
19529
|
10825 |
+
19530
|
10826 |
+
19531
|
10827 |
+
19532
|
10828 |
+
19534
|
10829 |
+
19535
|
10830 |
+
19536
|
10831 |
+
19537
|
10832 |
+
19538
|
10833 |
+
19540
|
10834 |
+
19542
|
10835 |
+
19548
|
10836 |
+
19549
|
10837 |
+
19551
|
10838 |
+
19552
|
10839 |
+
19553
|
10840 |
+
19556
|
10841 |
+
19557
|
10842 |
+
19558
|
10843 |
+
19559
|
10844 |
+
19560
|
10845 |
+
19561
|
10846 |
+
19563
|
10847 |
+
19565
|
10848 |
+
19566
|
10849 |
+
19567
|
10850 |
+
19573
|
10851 |
+
19576
|
10852 |
+
19577
|
10853 |
+
19578
|
10854 |
+
19585
|
10855 |
+
19587
|
10856 |
+
19588
|
10857 |
+
19592
|
10858 |
+
19593
|
10859 |
+
19594
|
10860 |
+
19597
|
10861 |
+
19598
|
10862 |
+
19600
|
10863 |
+
19602
|
10864 |
+
19607
|
10865 |
+
19616
|
10866 |
+
19617
|
10867 |
+
19618
|
10868 |
+
19621
|
10869 |
+
19623
|
10870 |
+
19624
|
10871 |
+
19625
|
10872 |
+
19626
|
10873 |
+
19628
|
10874 |
+
19632
|
10875 |
+
19634
|
10876 |
+
19637
|
10877 |
+
19638
|
10878 |
+
19639
|
10879 |
+
19640
|
10880 |
+
19642
|
10881 |
+
19643
|
10882 |
+
19644
|
10883 |
+
19645
|
10884 |
+
19648
|
10885 |
+
19650
|
10886 |
+
19651
|
10887 |
+
19652
|
10888 |
+
19653
|
10889 |
+
19654
|
10890 |
+
19658
|
10891 |
+
19659
|
10892 |
+
19665
|
10893 |
+
19666
|
10894 |
+
19673
|
10895 |
+
19677
|
10896 |
+
19678
|
10897 |
+
19679
|
10898 |
+
19680
|
10899 |
+
19682
|
10900 |
+
19683
|
10901 |
+
19685
|
10902 |
+
19686
|
10903 |
+
19687
|
10904 |
+
19688
|
10905 |
+
19689
|
10906 |
+
19691
|
10907 |
+
19692
|
10908 |
+
19693
|
10909 |
+
19694
|
10910 |
+
19697
|
10911 |
+
19698
|
10912 |
+
19699
|
10913 |
+
19700
|
10914 |
+
19702
|
10915 |
+
19703
|
10916 |
+
19704
|
10917 |
+
19706
|
10918 |
+
19707
|
10919 |
+
19708
|
10920 |
+
19709
|
10921 |
+
19715
|
10922 |
+
19717
|
10923 |
+
19719
|
10924 |
+
19720
|
10925 |
+
19724
|
10926 |
+
19727
|
10927 |
+
19728
|
10928 |
+
19730
|
10929 |
+
19731
|
10930 |
+
19732
|
10931 |
+
19734
|
10932 |
+
19738
|
10933 |
+
19742
|
10934 |
+
19743
|
10935 |
+
19744
|
10936 |
+
19747
|
10937 |
+
19748
|
10938 |
+
19751
|
10939 |
+
19755
|
10940 |
+
19756
|
10941 |
+
19766
|
10942 |
+
19767
|
10943 |
+
19768
|
10944 |
+
19769
|
10945 |
+
19773
|
10946 |
+
19774
|
10947 |
+
19777
|
10948 |
+
19779
|
10949 |
+
19780
|
10950 |
+
19783
|
10951 |
+
19784
|
10952 |
+
19787
|
10953 |
+
19792
|
10954 |
+
19802
|
10955 |
+
19805
|
10956 |
+
19807
|
10957 |
+
19808
|
10958 |
+
19811
|
10959 |
+
19812
|
10960 |
+
19815
|
10961 |
+
19823
|
10962 |
+
19827
|
10963 |
+
19829
|
10964 |
+
19841
|
10965 |
+
19843
|
10966 |
+
19845
|
10967 |
+
19847
|
10968 |
+
19848
|
10969 |
+
19849
|
10970 |
+
19850
|
10971 |
+
19851
|
10972 |
+
19855
|
10973 |
+
19862
|
10974 |
+
19868
|
10975 |
+
19871
|
10976 |
+
19872
|
10977 |
+
19873
|
10978 |
+
19874
|
10979 |
+
19875
|
10980 |
+
19876
|
10981 |
+
19877
|
10982 |
+
19881
|
10983 |
+
19882
|
10984 |
+
19883
|
10985 |
+
19884
|
10986 |
+
19889
|
10987 |
+
19892
|
10988 |
+
19897
|
10989 |
+
19899
|
10990 |
+
19904
|
10991 |
+
19905
|
10992 |
+
19906
|
10993 |
+
19907
|
10994 |
+
19912
|
10995 |
+
19915
|
10996 |
+
19917
|
10997 |
+
19918
|
10998 |
+
19919
|
10999 |
+
19920
|
11000 |
+
19924
|
11001 |
+
19926
|
11002 |
+
19927
|
11003 |
+
19928
|
11004 |
+
19932
|
11005 |
+
19934
|
11006 |
+
19936
|
11007 |
+
19937
|
11008 |
+
19938
|
11009 |
+
19939
|
11010 |
+
19940
|
11011 |
+
19943
|
11012 |
+
19945
|
11013 |
+
19948
|
11014 |
+
19953
|
11015 |
+
19955
|
11016 |
+
19956
|
11017 |
+
19957
|
11018 |
+
19962
|
11019 |
+
19963
|
11020 |
+
19966
|
11021 |
+
19967
|
11022 |
+
19974
|
11023 |
+
19977
|
11024 |
+
19978
|
11025 |
+
19979
|
11026 |
+
19980
|
11027 |
+
19981
|
11028 |
+
19982
|
11029 |
+
19983
|
11030 |
+
19984
|
11031 |
+
19985
|
11032 |
+
19986
|
11033 |
+
19988
|
11034 |
+
19995
|
11035 |
+
19998
|
11036 |
+
20000
|
11037 |
+
20002
|
11038 |
+
20007
|
11039 |
+
20008
|
11040 |
+
20009
|
11041 |
+
20011
|
11042 |
+
20018
|
11043 |
+
20020
|
11044 |
+
20028
|
11045 |
+
989
|
11046 |
+
20029
|
11047 |
+
20030
|
11048 |
+
20033
|
11049 |
+
20034
|
11050 |
+
20035
|
11051 |
+
20038
|
11052 |
+
20039
|
11053 |
+
20041
|
11054 |
+
20046
|
11055 |
+
20051
|
11056 |
+
20052
|
11057 |
+
20053
|
11058 |
+
20054
|
11059 |
+
20056
|
11060 |
+
20058
|
11061 |
+
20059
|
11062 |
+
20062
|
11063 |
+
20063
|
11064 |
+
20064
|
11065 |
+
20065
|
11066 |
+
20066
|
11067 |
+
20067
|
11068 |
+
20070
|
11069 |
+
20071
|
11070 |
+
20072
|
11071 |
+
20073
|
11072 |
+
20075
|
11073 |
+
20077
|
11074 |
+
20079
|
11075 |
+
20080
|
11076 |
+
20083
|
11077 |
+
20090
|
11078 |
+
20091
|
11079 |
+
20093
|
11080 |
+
20095
|
11081 |
+
20096
|
11082 |
+
20101
|
11083 |
+
20102
|
11084 |
+
20106
|
11085 |
+
20107
|
11086 |
+
20109
|
11087 |
+
20111
|
11088 |
+
20112
|
11089 |
+
20113
|
11090 |
+
20117
|
11091 |
+
20119
|
11092 |
+
20120
|
11093 |
+
20124
|
11094 |
+
20125
|
11095 |
+
20126
|
11096 |
+
20127
|
11097 |
+
20131
|
11098 |
+
20132
|
11099 |
+
20135
|
11100 |
+
20136
|
11101 |
+
20137
|
11102 |
+
20138
|
11103 |
+
20149
|
11104 |
+
20150
|
11105 |
+
20152
|
11106 |
+
20154
|
11107 |
+
20156
|
11108 |
+
20157
|
11109 |
+
20158
|
11110 |
+
20162
|
11111 |
+
20163
|
11112 |
+
20164
|
11113 |
+
20171
|
11114 |
+
20172
|
11115 |
+
20174
|
11116 |
+
20180
|
11117 |
+
20181
|
11118 |
+
20183
|
11119 |
+
20187
|
11120 |
+
20194
|
11121 |
+
20196
|
11122 |
+
20201
|
11123 |
+
20202
|
11124 |
+
20204
|
11125 |
+
20207
|
11126 |
+
20208
|
11127 |
+
20216
|
11128 |
+
20218
|
11129 |
+
20220
|
11130 |
+
20223
|
11131 |
+
20224
|
11132 |
+
20225
|
11133 |
+
20227
|
11134 |
+
20228
|
11135 |
+
20229
|
11136 |
+
20230
|
11137 |
+
20231
|
11138 |
+
20232
|
11139 |
+
20233
|
11140 |
+
20234
|
11141 |
+
20235
|
11142 |
+
20237
|
11143 |
+
20239
|
11144 |
+
20240
|
11145 |
+
20241
|
11146 |
+
20242
|
11147 |
+
20245
|
11148 |
+
20248
|
11149 |
+
20249
|
11150 |
+
20257
|
11151 |
+
20259
|
11152 |
+
20260
|
11153 |
+
20266
|
11154 |
+
20271
|
11155 |
+
20274
|
11156 |
+
20275
|
11157 |
+
20276
|
11158 |
+
20278
|
11159 |
+
20280
|
11160 |
+
20287
|
11161 |
+
20289
|
11162 |
+
20290
|
11163 |
+
20291
|
11164 |
+
20292
|
11165 |
+
20293
|
11166 |
+
20295
|
11167 |
+
20296
|
11168 |
+
20297
|
11169 |
+
20298
|
11170 |
+
20299
|
11171 |
+
20300
|
11172 |
+
20304
|
11173 |
+
20305
|
11174 |
+
20306
|
11175 |
+
20309
|
11176 |
+
20312
|
11177 |
+
20314
|
11178 |
+
20320
|
11179 |
+
20321
|
11180 |
+
20326
|
11181 |
+
20327
|
11182 |
+
20328
|
11183 |
+
20334
|
11184 |
+
20335
|
11185 |
+
20336
|
11186 |
+
20337
|
11187 |
+
20339
|
11188 |
+
20342
|
11189 |
+
20344
|
11190 |
+
20346
|
11191 |
+
20356
|
11192 |
+
20357
|
11193 |
+
20358
|
11194 |
+
20364
|
11195 |
+
20365
|
11196 |
+
20366
|
11197 |
+
20367
|
11198 |
+
20369
|
11199 |
+
20371
|
11200 |
+
20372
|
11201 |
+
20373
|
11202 |
+
20374
|
11203 |
+
20375
|
11204 |
+
20376
|
11205 |
+
20377
|
11206 |
+
20382
|
11207 |
+
20383
|
11208 |
+
20385
|
11209 |
+
20388
|
11210 |
+
20390
|
11211 |
+
20394
|
11212 |
+
20395
|
11213 |
+
20399
|
11214 |
+
20401
|
11215 |
+
20403
|
11216 |
+
20406
|
11217 |
+
20407
|
11218 |
+
20408
|
11219 |
+
20409
|
11220 |
+
20414
|
11221 |
+
20415
|
11222 |
+
20416
|
11223 |
+
20417
|
11224 |
+
20418
|
11225 |
+
20419
|
11226 |
+
20422
|
11227 |
+
20423
|
11228 |
+
20424
|
11229 |
+
20425
|
11230 |
+
20426
|
11231 |
+
20427
|
11232 |
+
20429
|
11233 |
+
20431
|
11234 |
+
20433
|
11235 |
+
20434
|
11236 |
+
20436
|
11237 |
+
20446
|
11238 |
+
20450
|
11239 |
+
20453
|
11240 |
+
20455
|
11241 |
+
20458
|
11242 |
+
20461
|
11243 |
+
20465
|
11244 |
+
20466
|
11245 |
+
20469
|
11246 |
+
990
|
11247 |
+
20479
|
11248 |
+
20480
|
11249 |
+
20481
|
11250 |
+
20483
|
11251 |
+
20484
|
11252 |
+
20486
|
11253 |
+
20488
|
11254 |
+
20489
|
11255 |
+
20493
|
11256 |
+
20497
|
11257 |
+
20500
|
11258 |
+
20501
|
11259 |
+
20502
|
11260 |
+
20503
|
11261 |
+
20506
|
11262 |
+
20507
|
11263 |
+
20508
|
11264 |
+
20512
|
11265 |
+
20513
|
11266 |
+
20514
|
11267 |
+
20519
|
11268 |
+
20520
|
11269 |
+
20523
|
11270 |
+
20524
|
11271 |
+
20525
|
11272 |
+
20526
|
11273 |
+
20527
|
11274 |
+
20528
|
11275 |
+
20531
|
11276 |
+
20532
|
11277 |
+
20534
|
11278 |
+
20536
|
11279 |
+
20537
|
11280 |
+
20538
|
11281 |
+
20542
|
11282 |
+
20546
|
11283 |
+
20550
|
11284 |
+
20551
|
11285 |
+
20554
|
11286 |
+
20557
|
11287 |
+
20558
|
11288 |
+
20560
|
11289 |
+
20561
|
11290 |
+
20562
|
11291 |
+
20563
|
11292 |
+
20565
|
11293 |
+
20566
|
11294 |
+
20570
|
11295 |
+
20571
|
11296 |
+
20574
|
11297 |
+
20576
|
11298 |
+
20577
|
11299 |
+
20580
|
11300 |
+
20583
|
11301 |
+
20585
|
11302 |
+
20586
|
11303 |
+
20589
|
11304 |
+
20591
|
11305 |
+
20594
|
11306 |
+
20597
|
11307 |
+
20598
|
11308 |
+
20599
|
11309 |
+
20600
|
11310 |
+
20602
|
11311 |
+
20603
|
11312 |
+
20604
|
11313 |
+
20605
|
11314 |
+
20606
|
11315 |
+
20609
|
11316 |
+
20611
|
11317 |
+
20612
|
11318 |
+
20614
|
11319 |
+
20615
|
11320 |
+
20617
|
11321 |
+
20621
|
11322 |
+
20622
|
11323 |
+
20625
|
11324 |
+
20626
|
11325 |
+
20627
|
11326 |
+
20629
|
11327 |
+
20630
|
11328 |
+
20632
|
11329 |
+
20634
|
11330 |
+
20636
|
11331 |
+
20637
|
11332 |
+
20638
|
11333 |
+
20639
|
11334 |
+
20643
|
11335 |
+
20647
|
11336 |
+
20649
|
11337 |
+
20650
|
11338 |
+
20651
|
11339 |
+
20652
|
11340 |
+
20659
|
11341 |
+
20660
|
11342 |
+
20662
|
11343 |
+
20664
|
11344 |
+
20668
|
11345 |
+
20669
|
11346 |
+
20670
|
11347 |
+
20671
|
11348 |
+
20672
|
11349 |
+
20673
|
11350 |
+
20674
|
11351 |
+
20675
|
11352 |
+
20676
|
11353 |
+
20677
|
11354 |
+
20678
|
11355 |
+
20680
|
11356 |
+
20687
|
11357 |
+
20688
|
11358 |
+
20690
|
11359 |
+
20692
|
11360 |
+
20695
|
11361 |
+
20696
|
11362 |
+
20697
|
11363 |
+
20698
|
11364 |
+
20700
|
11365 |
+
20701
|
11366 |
+
20702
|
11367 |
+
20703
|
11368 |
+
20704
|
11369 |
+
20709
|
11370 |
+
20710
|
11371 |
+
20712
|
11372 |
+
20713
|
11373 |
+
20714
|
11374 |
+
20715
|
11375 |
+
20718
|
11376 |
+
20719
|
11377 |
+
20720
|
11378 |
+
20721
|
11379 |
+
20722
|
11380 |
+
20723
|
11381 |
+
20724
|
11382 |
+
20725
|
11383 |
+
20727
|
11384 |
+
20728
|
11385 |
+
20732
|
11386 |
+
20733
|
11387 |
+
20735
|
11388 |
+
20736
|
11389 |
+
20740
|
11390 |
+
20741
|
11391 |
+
20742
|
11392 |
+
20746
|
11393 |
+
20750
|
11394 |
+
20757
|
11395 |
+
20762
|
11396 |
+
20763
|
11397 |
+
20764
|
11398 |
+
20765
|
11399 |
+
20771
|
11400 |
+
20772
|
11401 |
+
20775
|
11402 |
+
20776
|
11403 |
+
20785
|
11404 |
+
20791
|
11405 |
+
20794
|
11406 |
+
20795
|
11407 |
+
20796
|
11408 |
+
20797
|
11409 |
+
20798
|
11410 |
+
20799
|
11411 |
+
20800
|
11412 |
+
20801
|
11413 |
+
20802
|
11414 |
+
20805
|
11415 |
+
20806
|
11416 |
+
20808
|
11417 |
+
20814
|
11418 |
+
20816
|
11419 |
+
20817
|
11420 |
+
20820
|
11421 |
+
20822
|
11422 |
+
20827
|
11423 |
+
20831
|
11424 |
+
20832
|
11425 |
+
20833
|
11426 |
+
20838
|
11427 |
+
20839
|
11428 |
+
20841
|
11429 |
+
20844
|
11430 |
+
20845
|
11431 |
+
20847
|
11432 |
+
20848
|
11433 |
+
20851
|
11434 |
+
20855
|
11435 |
+
20856
|
11436 |
+
20858
|
11437 |
+
20859
|
11438 |
+
20860
|
11439 |
+
20861
|
11440 |
+
20862
|
11441 |
+
20863
|
11442 |
+
20867
|
11443 |
+
20868
|
11444 |
+
20869
|
11445 |
+
20871
|
11446 |
+
20874
|
11447 |
+
20878
|
11448 |
+
20881
|
11449 |
+
20884
|
11450 |
+
20885
|
11451 |
+
20888
|
11452 |
+
20889
|
11453 |
+
20890
|
11454 |
+
20892
|
11455 |
+
20894
|
11456 |
+
20898
|
11457 |
+
20903
|
11458 |
+
20905
|
11459 |
+
20906
|
11460 |
+
20908
|
11461 |
+
20910
|
11462 |
+
20911
|
11463 |
+
20912
|
11464 |
+
20914
|
11465 |
+
20916
|
11466 |
+
20919
|
11467 |
+
20921
|
11468 |
+
20925
|
11469 |
+
20927
|
11470 |
+
20928
|
11471 |
+
20929
|
11472 |
+
20934
|
11473 |
+
20935
|
11474 |
+
20944
|
11475 |
+
20945
|
11476 |
+
20946
|
11477 |
+
20948
|
11478 |
+
20949
|
11479 |
+
20950
|
11480 |
+
20951
|
11481 |
+
20954
|
11482 |
+
20955
|
11483 |
+
20957
|
11484 |
+
20958
|
11485 |
+
20963
|
11486 |
+
20964
|
11487 |
+
20967
|
11488 |
+
20971
|
11489 |
+
20976
|
11490 |
+
20987
|
11491 |
+
20988
|
11492 |
+
20989
|
11493 |
+
20990
|
11494 |
+
20993
|
11495 |
+
20996
|
11496 |
+
20997
|
11497 |
+
21006
|
11498 |
+
21009
|
11499 |
+
21010
|
11500 |
+
21015
|
11501 |
+
21017
|
11502 |
+
21020
|
11503 |
+
21022
|
11504 |
+
21025
|
11505 |
+
21032
|
11506 |
+
21033
|
11507 |
+
991
|
11508 |
+
21034
|
11509 |
+
21035
|
11510 |
+
21038
|
11511 |
+
21042
|
11512 |
+
21044
|
11513 |
+
21045
|
11514 |
+
21046
|
11515 |
+
21047
|
11516 |
+
21049
|
11517 |
+
21050
|
11518 |
+
21051
|
11519 |
+
992
|
11520 |
+
21052
|
11521 |
+
21053
|
11522 |
+
21054
|
11523 |
+
21055
|
11524 |
+
21056
|
11525 |
+
21057
|
11526 |
+
21059
|
11527 |
+
21060
|
11528 |
+
21061
|
11529 |
+
21062
|
11530 |
+
21063
|
11531 |
+
21064
|
11532 |
+
21065
|
11533 |
+
21069
|
11534 |
+
21070
|
11535 |
+
21071
|
11536 |
+
21072
|
11537 |
+
21073
|
11538 |
+
21074
|
11539 |
+
21075
|
11540 |
+
21078
|
11541 |
+
21082
|
11542 |
+
21084
|
11543 |
+
21089
|
11544 |
+
21090
|
11545 |
+
21092
|
11546 |
+
21093
|
11547 |
+
21094
|
11548 |
+
21098
|
11549 |
+
21100
|
11550 |
+
21106
|
11551 |
+
21107
|
11552 |
+
21118
|
11553 |
+
21119
|
11554 |
+
21121
|
11555 |
+
21125
|
11556 |
+
21127
|
11557 |
+
21128
|
11558 |
+
21130
|
11559 |
+
21131
|
11560 |
+
21134
|
11561 |
+
21139
|
11562 |
+
21142
|
11563 |
+
21144
|
11564 |
+
21151
|
11565 |
+
21152
|
11566 |
+
21153
|
11567 |
+
21154
|
11568 |
+
21155
|
11569 |
+
21157
|
11570 |
+
21159
|
11571 |
+
21162
|
11572 |
+
21164
|
11573 |
+
21165
|
11574 |
+
993
|
11575 |
+
21171
|
11576 |
+
21174
|
11577 |
+
21175
|
11578 |
+
994
|
11579 |
+
21176
|
11580 |
+
21178
|
11581 |
+
21183
|
11582 |
+
21184
|
11583 |
+
995
|
11584 |
+
21189
|
11585 |
+
21192
|
11586 |
+
21193
|
11587 |
+
996
|
11588 |
+
21199
|
11589 |
+
21200
|
11590 |
+
21201
|
11591 |
+
997
|
11592 |
+
21202
|
11593 |
+
21203
|
11594 |
+
21205
|
11595 |
+
21220
|
11596 |
+
21222
|
11597 |
+
21225
|
11598 |
+
21230
|
11599 |
+
21233
|
11600 |
+
21239
|
11601 |
+
21253
|
11602 |
+
21258
|
11603 |
+
21269
|
11604 |
+
21270
|
11605 |
+
21272
|
11606 |
+
21280
|
11607 |
+
21282
|
11608 |
+
21283
|
11609 |
+
21285
|
11610 |
+
21287
|
11611 |
+
21288
|
11612 |
+
21298
|
11613 |
+
21304
|
11614 |
+
21306
|
11615 |
+
21308
|
11616 |
+
21312
|
11617 |
+
21313
|
11618 |
+
21314
|
11619 |
+
21316
|
11620 |
+
21317
|
11621 |
+
21318
|
11622 |
+
21319
|
11623 |
+
21320
|
11624 |
+
21322
|
11625 |
+
21328
|
11626 |
+
21337
|
11627 |
+
21338
|
11628 |
+
21340
|
11629 |
+
21344
|
11630 |
+
21346
|
11631 |
+
21350
|
11632 |
+
21353
|
11633 |
+
21357
|
11634 |
+
21358
|
11635 |
+
21359
|
11636 |
+
21362
|
11637 |
+
21364
|
11638 |
+
998
|
11639 |
+
21370
|
11640 |
+
21374
|
11641 |
+
21376
|
11642 |
+
21377
|
11643 |
+
21378
|
11644 |
+
21382
|
11645 |
+
21386
|
11646 |
+
21388
|
11647 |
+
21389
|
11648 |
+
21397
|
11649 |
+
21398
|
11650 |
+
21402
|
11651 |
+
21407
|
11652 |
+
21408
|
11653 |
+
21411
|
11654 |
+
21414
|
11655 |
+
21415
|
11656 |
+
21419
|
11657 |
+
21425
|
11658 |
+
21428
|
11659 |
+
21429
|
11660 |
+
21431
|
11661 |
+
21432
|
11662 |
+
21433
|
11663 |
+
21438
|
11664 |
+
21441
|
11665 |
+
21451
|
11666 |
+
21459
|
11667 |
+
21464
|
11668 |
+
21467
|
11669 |
+
21469
|
11670 |
+
21476
|
11671 |
+
21479
|
11672 |
+
21484
|
11673 |
+
21485
|
11674 |
+
21486
|
11675 |
+
21489
|
11676 |
+
21494
|
11677 |
+
21495
|
11678 |
+
21497
|
11679 |
+
21501
|
11680 |
+
21502
|
11681 |
+
21507
|
11682 |
+
21511
|
11683 |
+
21515
|
11684 |
+
21516
|
11685 |
+
21517
|
11686 |
+
21519
|
11687 |
+
21522
|
11688 |
+
21524
|
11689 |
+
21528
|
11690 |
+
21529
|
11691 |
+
21532
|
11692 |
+
21533
|
11693 |
+
21534
|
11694 |
+
21537
|
11695 |
+
21541
|
11696 |
+
21545
|
11697 |
+
21547
|
11698 |
+
21548
|
11699 |
+
21549
|
11700 |
+
21550
|
11701 |
+
21554
|
11702 |
+
21560
|
11703 |
+
21563
|
11704 |
+
21569
|
11705 |
+
21573
|
11706 |
+
21576
|
11707 |
+
21578
|
11708 |
+
21579
|
11709 |
+
21580
|
11710 |
+
21581
|
11711 |
+
21585
|
11712 |
+
21589
|
11713 |
+
21590
|
11714 |
+
21591
|
11715 |
+
21598
|
11716 |
+
21601
|
11717 |
+
21604
|
11718 |
+
21606
|
11719 |
+
21611
|
11720 |
+
21615
|
11721 |
+
21618
|
11722 |
+
21620
|
11723 |
+
21623
|
11724 |
+
21625
|
11725 |
+
21627
|
11726 |
+
21635
|
11727 |
+
21637
|
11728 |
+
21638
|
11729 |
+
21641
|
11730 |
+
21644
|
11731 |
+
21645
|
11732 |
+
21648
|
11733 |
+
21649
|
11734 |
+
21650
|
11735 |
+
21659
|
11736 |
+
21661
|
11737 |
+
21662
|
11738 |
+
21663
|
11739 |
+
21665
|
11740 |
+
21666
|
11741 |
+
21668
|
11742 |
+
21669
|
11743 |
+
21672
|
11744 |
+
21673
|
11745 |
+
21675
|
11746 |
+
21678
|
11747 |
+
21679
|
11748 |
+
21680
|
11749 |
+
21686
|
11750 |
+
21688
|
11751 |
+
21689
|
11752 |
+
21690
|
11753 |
+
21692
|
11754 |
+
21702
|
11755 |
+
21711
|
11756 |
+
21712
|
11757 |
+
21713
|
11758 |
+
21716
|
11759 |
+
21717
|
11760 |
+
21721
|
11761 |
+
21722
|
11762 |
+
21723
|
11763 |
+
21724
|
11764 |
+
21727
|
11765 |
+
21728
|
11766 |
+
21729
|
11767 |
+
21734
|
11768 |
+
21739
|
11769 |
+
21740
|
11770 |
+
21741
|
11771 |
+
21743
|
11772 |
+
21744
|
11773 |
+
21747
|
11774 |
+
21748
|
11775 |
+
21749
|
11776 |
+
21754
|
11777 |
+
21757
|
11778 |
+
21758
|
11779 |
+
21760
|
11780 |
+
21761
|
11781 |
+
21762
|
11782 |
+
21763
|
11783 |
+
21765
|
11784 |
+
21772
|
11785 |
+
21773
|
11786 |
+
21774
|
11787 |
+
21777
|
11788 |
+
21778
|
11789 |
+
21781
|
11790 |
+
21782
|
11791 |
+
21784
|
11792 |
+
21786
|
11793 |
+
21791
|
11794 |
+
21792
|
11795 |
+
21795
|
11796 |
+
21796
|
11797 |
+
21800
|
11798 |
+
21801
|
11799 |
+
21803
|
11800 |
+
21804
|
11801 |
+
21806
|
11802 |
+
21811
|
11803 |
+
21815
|
11804 |
+
21816
|
11805 |
+
999
|
11806 |
+
21817
|
11807 |
+
21818
|
11808 |
+
21822
|
11809 |
+
21825
|
11810 |
+
21826
|
11811 |
+
21827
|
11812 |
+
21828
|
11813 |
+
21829
|
11814 |
+
21830
|
11815 |
+
21831
|
11816 |
+
21833
|
11817 |
+
21835
|
11818 |
+
21837
|
11819 |
+
21838
|
11820 |
+
21840
|
11821 |
+
21841
|
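An index file like this is only useful once applied. Below is a minimal sketch, assuming the layout described above (row i holds the 22k-MS class index for 12k class i), of remapping a 22k-class head's logits down to the 12k subset; the file path and the torch usage here are illustrative, not timm's own API.

```python
# Minimal sketch, not timm's own API. Assumes one integer per line, where
# row i gives the 22k-MS class index corresponding to 12k class i.
import torch

def load_indices(path: str) -> torch.Tensor:
    with open(path) as f:
        return torch.tensor([int(line) for line in f if line.strip()])

indices = load_indices('timm/data/_info/imagenet22k_ms_to_12k_indices.txt')

num_22k = int(indices.max()) + 1        # lower bound on the 22k-MS head size
logits_22k = torch.randn(8, num_22k)    # stand-in for a model's 22k output
logits_12k = logits_22k[:, indices]     # gather the 11821 retained classes
pred_12k = logits_12k.argmax(dim=1)     # predictions in 12k label order
```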
pytorch-image-models/timm/data/_info/imagenet22k_ms_to_22k_indices.txt
ADDED
The diff for this file is too large to render.
See raw diff
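The contents of this mapping file are not rendered, so the following is only a hypothetical sketch of what such a file enables: permuting a classifier head between two orderings of the 22k classes, assuming row i holds the position of source (MS) class i in the target ordering.

```python
# Hypothetical sketch: reorder classifier weights from one 22k class ordering
# to another, assuming mapping[i] is the target-ordering index of source class i.
import torch

def to_target_order(weight_src: torch.Tensor, mapping: list[int], num_target: int) -> torch.Tensor:
    # weight_src: [num_src_classes, dim] classifier matrix in the source ordering
    out = weight_src.new_zeros(num_target, weight_src.shape[1])
    out[torch.tensor(mapping)] = weight_src  # source row i lands at mapping[i]
    return out
```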
pytorch-image-models/timm/data/_info/imagenet22k_synsets.txt
ADDED
The diff for this file is too large to render.
See raw diff
pytorch-image-models/timm/data/_info/imagenet_a_synsets.txt
ADDED
@@ -0,0 +1,200 @@
200 WordNet synset IDs, one per line (n01498041 through n12267677), listing the ImageNet-1k classes that form the 200-class ImageNet-A evaluation subset. See raw diff for the full list.
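Since a corresponding index file for ImageNet-A is not shown here, the sketch below derives the 200 class positions itself by looking each ImageNet-A synset up in the 1k synset list, then restricts a standard 1000-way classifier's logits to the subset. It assumes both files hold one synset per line.

```python
# Minimal sketch: project ImageNet-1k logits onto the ImageNet-A class subset.
import torch

def read_synsets(path: str) -> list[str]:
    with open(path) as f:
        return [line.strip() for line in f if line.strip()]

in1k = read_synsets('timm/data/_info/imagenet_synsets.txt')
in_a = read_synsets('timm/data/_info/imagenet_a_synsets.txt')

pos = {s: i for i, s in enumerate(in1k)}
a_indices = torch.tensor([pos[s] for s in in_a])  # 200 positions in 1k space

logits_1k = torch.randn(4, 1000)                  # stand-in classifier output
logits_a = logits_1k[:, a_indices]                # 200-way ImageNet-A logits
```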
pytorch-image-models/timm/data/_info/imagenet_r_indices.txt
ADDED
@@ -0,0 +1,200 @@
200 integer class indices, one per line (1, 2, 4, 6, 8, ..., 988), giving the positions of the ImageNet-R classes within the 1000-class ImageNet-1k label space. See raw diff for the full list.
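The values appear to be 0-based positions in the standard ImageNet-1k ordering (goldfish at 1, great white shark at 2, and so on), which makes restricting a 1000-way prediction to the ImageNet-R classes a single gather. A minimal sketch:

```python
# Minimal sketch: evaluate a 1k classifier on the ImageNet-R label subset.
import torch

with open('timm/data/_info/imagenet_r_indices.txt') as f:
    r_indices = torch.tensor([int(x) for x in f.read().split()])

probs_1k = torch.softmax(torch.randn(2, 1000), dim=1)  # stand-in predictions
probs_r = probs_1k[:, r_indices]     # keep only the 200 ImageNet-R classes
top1_r = probs_r.argmax(dim=1)       # index into the 200-class R ordering
```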
pytorch-image-models/timm/data/_info/imagenet_r_synsets.txt
ADDED
@@ -0,0 +1,200 @@
200 WordNet synset IDs, one per line (n01443537 through n12267677), listing the ImageNet-1k classes covered by the ImageNet-R (Rendition) evaluation set. See raw diff for the full list.
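The synset list and the index list above should describe the same 200 classes; a quick sanity-check sketch, assuming one entry per line in each file:

```python
# Sanity check: looking each ImageNet-R synset up in the 1k list should
# reproduce imagenet_r_indices.txt exactly.
def lines(path):
    with open(path) as f:
        return [ln.strip() for ln in f if ln.strip()]

in1k = lines('timm/data/_info/imagenet_synsets.txt')
r_synsets = lines('timm/data/_info/imagenet_r_synsets.txt')
r_indices = [int(x) for x in lines('timm/data/_info/imagenet_r_indices.txt')]

assert [in1k.index(s) for s in r_synsets] == r_indices
```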
pytorch-image-models/timm/data/_info/imagenet_real_labels.json
ADDED
The diff for this file is too large to render.
See raw diff
pytorch-image-models/timm/data/_info/imagenet_synset_to_definition.txt
ADDED
The diff for this file is too large to render.
See raw diff
pytorch-image-models/timm/data/_info/imagenet_synset_to_lemma.txt
ADDED
The diff for this file is too large to render.
See raw diff
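None of these three files are rendered here, so the sketch below only illustrates one plausible consumer: scoring predictions against the "Real labels" re-annotation. It assumes the JSON is a list of per-image label-index lists in validation-image order; treat that layout as an assumption, not a spec.

```python
# Sketch of consuming imagenet_real_labels.json; the structure (list of
# per-image label-index lists, ordered like the ILSVRC2012 val images) is
# an assumption -- check the raw file before relying on it.
import json

with open('timm/data/_info/imagenet_real_labels.json') as f:
    real_labels = json.load(f)

def real_top1_correct(val_index: int, pred: int) -> bool:
    labels = real_labels[val_index]          # possibly empty for some images
    return bool(labels) and pred in labels   # unlabeled images don't count
```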
pytorch-image-models/timm/data/_info/imagenet_synsets.txt
ADDED
@@ -0,0 +1,1000 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
n01440764
n01443537
n01484850
n01491361
n01494475
n01496331
n01498041
n01514668
n01514859
n01518878
n01530575
n01531178
n01532829
n01534433
n01537544
n01558993
n01560419
n01580077
n01582220
n01592084
n01601694
n01608432
n01614925
n01616318
n01622779
n01629819
n01630670
n01631663
n01632458
n01632777
n01641577
n01644373
n01644900
n01664065
n01665541
n01667114
n01667778
n01669191
n01675722
n01677366
n01682714
n01685808
n01687978
n01688243
n01689811
n01692333
n01693334
n01694178
n01695060
n01697457
n01698640
n01704323
n01728572
n01728920
n01729322
n01729977
n01734418
n01735189
n01737021
n01739381
n01740131
n01742172
n01744401
n01748264
n01749939
n01751748
n01753488
n01755581
n01756291
n01768244
n01770081
n01770393
n01773157
n01773549
n01773797
n01774384
n01774750
n01775062
n01776313
n01784675
n01795545
n01796340
n01797886
n01798484
n01806143
n01806567
n01807496
n01817953
n01818515
n01819313
n01820546
n01824575
n01828970
n01829413
n01833805
n01843065
n01843383
n01847000
n01855032
n01855672
n01860187
n01871265
n01872401
n01873310
n01877812
n01882714
n01883070
n01910747
n01914609
n01917289
n01924916
n01930112
n01943899
n01944390
n01945685
n01950731
n01955084
n01968897
n01978287
n01978455
n01980166
n01981276
n01983481
n01984695
n01985128
n01986214
n01990800
n02002556
n02002724
n02006656
n02007558
n02009229
n02009912
n02011460
n02012849
n02013706
n02017213
n02018207
n02018795
n02025239
n02027492
n02028035
n02033041
n02037110
n02051845
n02056570
n02058221
n02066245
n02071294
n02074367
n02077923
n02085620
n02085782
n02085936
n02086079
n02086240
n02086646
n02086910
n02087046
n02087394
n02088094
n02088238
n02088364
n02088466
n02088632
n02089078
n02089867
n02089973
n02090379
n02090622
n02090721
n02091032
n02091134
n02091244
n02091467
n02091635
n02091831
n02092002
n02092339
n02093256
n02093428
n02093647
n02093754
n02093859
n02093991
n02094114
n02094258
n02094433
n02095314
n02095570
n02095889
n02096051
n02096177
n02096294
n02096437
n02096585
n02097047
n02097130
n02097209
n02097298
n02097474
n02097658
n02098105
n02098286
n02098413
n02099267
n02099429
n02099601
n02099712
n02099849
n02100236
n02100583
n02100735
n02100877
n02101006
n02101388
n02101556
n02102040
n02102177
n02102318
n02102480
n02102973
n02104029
n02104365
n02105056
n02105162
n02105251
n02105412
n02105505
n02105641
n02105855
n02106030
n02106166
n02106382
n02106550
n02106662
n02107142
n02107312
n02107574
n02107683
n02107908
n02108000
n02108089
n02108422
n02108551
n02108915
n02109047
n02109525
n02109961
n02110063
n02110185
n02110341
n02110627
n02110806
n02110958
n02111129
n02111277
n02111500
n02111889
n02112018
n02112137
n02112350
n02112706
n02113023
n02113186
n02113624
n02113712
n02113799
n02113978
n02114367
n02114548
n02114712
n02114855
n02115641
n02115913
n02116738
n02117135
n02119022
n02119789
n02120079
n02120505
n02123045
n02123159
n02123394
n02123597
n02124075
n02125311
n02127052
n02128385
n02128757
n02128925
n02129165
n02129604
n02130308
n02132136
n02133161
n02134084
n02134418
n02137549
n02138441
n02165105
n02165456
n02167151
n02168699
n02169497
n02172182
n02174001
n02177972
n02190166
n02206856
n02219486
n02226429
n02229544
n02231487
n02233338
n02236044
n02256656
n02259212
n02264363
n02268443
n02268853
n02276258
n02277742
n02279972
n02280649
n02281406
n02281787
n02317335
n02319095
n02321529
n02325366
n02326432
n02328150
n02342885
n02346627
n02356798
n02361337
n02363005
n02364673
n02389026
n02391049
n02395406
n02396427
n02397096
n02398521
n02403003
n02408429
n02410509
n02412080
n02415577
n02417914
n02422106
n02422699
n02423022
n02437312
n02437616
n02441942
n02442845
n02443114
n02443484
n02444819
n02445715
n02447366
n02454379
n02457408
n02480495
n02480855
n02481823
n02483362
n02483708
n02484975
n02486261
n02486410
n02487347
n02488291
n02488702
n02489166
n02490219
n02492035
n02492660
n02493509
n02493793
n02494079
n02497673
n02500267
n02504013
n02504458
n02509815
n02510455
n02514041
n02526121
n02536864
n02606052
n02607072
n02640242
n02641379
n02643566
n02655020
n02666196
n02667093
n02669723
n02672831
n02676566
n02687172
n02690373
n02692877
n02699494
n02701002
n02704792
n02708093
n02727426
n02730930
n02747177
n02749479
n02769748
n02776631
n02777292
n02782093
n02783161
n02786058
n02787622
n02788148
n02790996
n02791124
n02791270
n02793495
n02794156
n02795169
n02797295
n02799071
n02802426
n02804414
n02804610
n02807133
n02808304
n02808440
n02814533
n02814860
n02815834
n02817516
n02823428
n02823750
n02825657
n02834397
n02835271
n02837789
n02840245
n02841315
n02843684
n02859443
n02860847
n02865351
n02869837
n02870880
n02871525
n02877765
n02879718
n02883205
n02892201
n02892767
n02894605
n02895154
n02906734
n02909870
n02910353
n02916936
n02917067
n02927161
n02930766
n02939185
n02948072
n02950826
n02951358
n02951585
n02963159
n02965783
n02966193
n02966687
n02971356
n02974003
n02977058
n02978881
n02979186
n02980441
n02981792
n02988304
n02992211
n02992529
n02999410
n03000134
n03000247
n03000684
n03014705
n03016953
n03017168
n03018349
n03026506
n03028079
n03032252
n03041632
n03042490
n03045698
n03047690
n03062245
n03063599
n03063689
n03065424
n03075370
n03085013
n03089624
n03095699
n03100240
n03109150
n03110669
n03124043
n03124170
n03125729
n03126707
n03127747
n03127925
n03131574
n03133878
n03134739
n03141823
n03146219
n03160309
n03179701
n03180011
n03187595
n03188531
n03196217
n03197337
n03201208
n03207743
n03207941
n03208938
n03216828
n03218198
n03220513
n03223299
n03240683
n03249569
n03250847
n03255030
n03259280
n03271574
n03272010
n03272562
n03290653
n03291819
n03297495
n03314780
n03325584
n03337140
n03344393
n03345487
n03347037
n03355925
n03372029
n03376595
n03379051
n03384352
n03388043
n03388183
n03388549
n03393912
n03394916
n03400231
n03404251
n03417042
n03424325
n03425413
n03443371
n03444034
n03445777
n03445924
n03447447
n03447721
n03450230
n03452741
n03457902
n03459775
n03461385
n03467068
n03476684
n03476991
n03478589
n03481172
n03482405
n03483316
n03485407
n03485794
n03492542
n03494278
n03495258
n03496892
n03498962
n03527444
n03529860
n03530642
n03532672
n03534580
n03535780
n03538406
n03544143
n03584254
n03584829
n03590841
n03594734
n03594945
n03595614
n03598930
n03599486
n03602883
n03617480
n03623198
n03627232
n03630383
n03633091
n03637318
n03642806
n03649909
n03657121
n03658185
n03661043
n03662601
n03666591
n03670208
n03673027
n03676483
n03680355
n03690938
n03691459
n03692522
n03697007
n03706229
n03709823
n03710193
n03710637
n03710721
n03717622
n03720891
n03721384
n03724870
n03729826
n03733131
n03733281
n03733805
n03742115
n03743016
n03759954
n03761084
n03763968
n03764736
n03769881
n03770439
n03770679
n03773504
n03775071
n03775546
n03776460
n03777568
n03777754
n03781244
n03782006
n03785016
n03786901
n03787032
n03788195
n03788365
n03791053
n03792782
n03792972
n03793489
n03794056
n03796401
n03803284
n03804744
n03814639
n03814906
n03825788
n03832673
n03837869
n03838899
n03840681
n03841143
n03843555
n03854065
n03857828
n03866082
n03868242
n03868863
n03871628
n03873416
n03874293
n03874599
n03876231
n03877472
n03877845
n03884397
n03887697
n03888257
n03888605
n03891251
n03891332
n03895866
n03899768
n03902125
n03903868
n03908618
n03908714
n03916031
n03920288
n03924679
n03929660
n03929855
n03930313
n03930630
n03933933
n03935335
n03937543
n03938244
n03942813
n03944341
n03947888
n03950228
n03954731
n03956157
n03958227
n03961711
n03967562
n03970156
n03976467
n03976657
n03977966
n03980874
n03982430
n03983396
n03991062
n03992509
n03995372
n03998194
n04004767
n04005630
n04008634
n04009552
n04019541
n04023962
n04026417
n04033901
n04033995
n04037443
n04039381
n04040759
n04041544
n04044716
n04049303
n04065272
n04067472
n04069434
n04070727
n04074963
n04081281
n04086273
n04090263
n04099969
n04111531
n04116512
n04118538
n04118776
n04120489
n04125021
n04127249
n04131690
n04133789
n04136333
n04141076
n04141327
n04141975
n04146614
n04147183
n04149813
n04152593
n04153751
n04154565
n04162706
n04179913
n04192698
n04200800
n04201297
n04204238
n04204347
n04208210
n04209133
n04209239
n04228054
n04229816
n04235860
n04238763
n04239074
n04243546
n04251144
n04252077
n04252225
n04254120
n04254680
n04254777
n04258138
n04259630
n04263257
n04264628
n04265275
n04266014
n04270147
n04273569
n04275548
n04277352
n04285008
n04286575
n04296562
n04310018
n04311004
n04311174
n04317175
n04325704
n04326547
n04328186
n04330267
n04332243
n04335435
n04336792
n04344873
n04346328
n04347754
n04350905
n04355338
n04355933
n04356056
n04357314
n04366367
n04367480
n04370456
n04371430
n04371774
n04372370
n04376876
n04380533
n04389033
n04392985
n04398044
n04399382
n04404412
n04409515
n04417672
n04418357
n04423845
n04428191
n04429376
n04435653
n04442312
n04443257
n04447861
n04456115
n04458633
n04461696
n04462240
n04465501
n04467665
n04476259
n04479046
n04482393
n04483307
n04485082
n04486054
n04487081
n04487394
n04493381
n04501370
n04505470
n04507155
n04509417
n04515003
n04517823
n04522168
n04523525
n04525038
n04525305
n04532106
n04532670
n04536866
n04540053
n04542943
n04548280
n04548362
n04550184
n04552348
n04553703
n04554684
n04557648
n04560804
n04562935
n04579145
n04579432
n04584207
n04589890
n04590129
n04591157
n04591713
n04592741
n04596742
n04597913
n04599235
n04604644
n04606251
n04612504
n04613696
n06359193
n06596364
n06785654
n06794110
n06874185
n07248320
n07565083
n07579787
n07583066
n07584110
n07590611
n07613480
n07614500
n07615774
n07684084
n07693725
n07695742
n07697313
n07697537
n07711569
n07714571
n07714990
n07715103
n07716358
n07716906
n07717410
n07717556
n07718472
n07718747
n07720875
n07730033
n07734744
n07742313
n07745940
n07747607
n07749582
n07753113
n07753275
n07753592
n07754684
n07760859
n07768694
n07802026
n07831146
n07836838
n07860988
n07871810
n07873807
n07875152
n07880968
n07892512
n07920052
n07930864
n07932039
n09193705
n09229709
n09246464
n09256479
n09288635
n09332890
n09399592
n09421951
n09428293
n09468604
n09472597
n09835506
n10148035
n10565667
n11879895
n11939491
n12057211
n12144580
n12267677
n12620546
n12768682
n12985857
n12998815
n13037406
n13040303
n13044778
n13052670
n13054560
n13133613
n15075141
pytorch-image-models/timm/data/_info/mini_imagenet_indices.txt
ADDED
@@ -0,0 +1,100 @@
12
15
51
64
70
96
99
107
111
121
149
166
173
176
207
214
228
242
244
245
249
251
256
266
270
275
279
291
299
301
306
310
359
364
392
403
412
427
440
454
471
476
478
484
494
502
503
507
519
524
533
538
546
553
556
567
569
584
597
602
604
605
629
655
657
659
683
687
702
709
713
735
741
758
779
781
800
801
807
815
819
847
854
858
860
880
881
883
909
912
914
919
925
927
934
950
972
973
997
998
pytorch-image-models/timm/data/_info/mini_imagenet_synsets.txt
ADDED
@@ -0,0 +1,100 @@
n01532829
n01558993
n01704323
n01749939
n01770081
n01843383
n01855672
n01910747
n01930112
n01981276
n02074367
n02089867
n02091244
n02091831
n02099601
n02101006
n02105505
n02108089
n02108551
n02108915
n02110063
n02110341
n02111277
n02113712
n02114548
n02116738
n02120079
n02129165
n02138441
n02165456
n02174001
n02219486
n02443484
n02457408
n02606052
n02687172
n02747177
n02795169
n02823428
n02871525
n02950826
n02966193
n02971356
n02981792
n03017168
n03047690
n03062245
n03075370
n03127925
n03146219
n03207743
n03220513
n03272010
n03337140
n03347037
n03400231
n03417042
n03476684
n03527444
n03535780
n03544143
n03584254
n03676483
n03770439
n03773504
n03775546
n03838899
n03854065
n03888605
n03908618
n03924679
n03980874
n03998194
n04067472
n04146614
n04149813
n04243546
n04251144
n04258138
n04275548
n04296562
n04389033
n04418357
n04435653
n04443257
n04509417
n04515003
n04522168
n04596742
n04604644
n04612504
n06794110
n07584110
n07613480
n07697537
n07747607
n09246464
n09256479
n13054560
n13133613
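
A quick sanity-check sketch for how these three _info files relate (the file paths here are assumed relative to timm/data/_info/; the indices are 0-based positions into imagenet_synsets.txt, e.g. index 12 -> n01532829):

    with open('imagenet_synsets.txt') as f:
        in1k = [line.strip() for line in f]       # 1000 ImageNet-1k synsets, in order
    with open('mini_imagenet_indices.txt') as f:
        idx = [int(line) for line in f]           # 100 positions into the list above
    with open('mini_imagenet_synsets.txt') as f:
        mini = [line.strip() for line in f]       # 100 mini-ImageNet synsets
    assert [in1k[i] for i in idx] == mini         # the two mini files are consistent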
pytorch-image-models/timm/data/auto_augment.py
ADDED
@@ -0,0 +1,1000 @@
""" AutoAugment, RandAugment, AugMix, and 3-Augment for PyTorch

This code implements the searched ImageNet policies with various tweaks and improvements and
does not include any of the search code.

AA and RA Implementation adapted from:
    https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/autoaugment.py

AugMix adapted from:
    https://github.com/google-research/augmix

3-Augment based on: https://github.com/facebookresearch/deit/blob/main/README_revenge.md

Papers:
    AutoAugment: Learning Augmentation Policies from Data - https://arxiv.org/abs/1805.09501
    Learning Data Augmentation Strategies for Object Detection - https://arxiv.org/abs/1906.11172
    RandAugment: Practical automated data augmentation... - https://arxiv.org/abs/1909.13719
    AugMix: A Simple Data Processing Method to Improve Robustness and Uncertainty - https://arxiv.org/abs/1912.02781
    3-Augment: DeiT III: Revenge of the ViT - https://arxiv.org/abs/2204.07118

Hacked together by / Copyright 2019, Ross Wightman
"""
import random
import math
import re
from functools import partial
from typing import Dict, List, Optional, Union

from PIL import Image, ImageOps, ImageEnhance, ImageChops, ImageFilter
import PIL
import numpy as np


_PIL_VER = tuple([int(x) for x in PIL.__version__.split('.')[:2]])

_FILL = (128, 128, 128)

_LEVEL_DENOM = 10.  # denominator for conversion from 'Mx' magnitude scale to fractional aug level for op arguments

_HPARAMS_DEFAULT = dict(
    translate_const=250,
    img_mean=_FILL,
)

if hasattr(Image, "Resampling"):
    _RANDOM_INTERPOLATION = (Image.Resampling.BILINEAR, Image.Resampling.BICUBIC)
    _DEFAULT_INTERPOLATION = Image.Resampling.BICUBIC
else:
    _RANDOM_INTERPOLATION = (Image.BILINEAR, Image.BICUBIC)
    _DEFAULT_INTERPOLATION = Image.BICUBIC


def _interpolation(kwargs):
    interpolation = kwargs.pop('resample', _DEFAULT_INTERPOLATION)
    if isinstance(interpolation, (list, tuple)):
        return random.choice(interpolation)
    return interpolation


def _check_args_tf(kwargs):
    if 'fillcolor' in kwargs and _PIL_VER < (5, 0):
        kwargs.pop('fillcolor')
    kwargs['resample'] = _interpolation(kwargs)


def shear_x(img, factor, **kwargs):
    _check_args_tf(kwargs)
    return img.transform(img.size, Image.AFFINE, (1, factor, 0, 0, 1, 0), **kwargs)


def shear_y(img, factor, **kwargs):
    _check_args_tf(kwargs)
    return img.transform(img.size, Image.AFFINE, (1, 0, 0, factor, 1, 0), **kwargs)


def translate_x_rel(img, pct, **kwargs):
    pixels = pct * img.size[0]
    _check_args_tf(kwargs)
    return img.transform(img.size, Image.AFFINE, (1, 0, pixels, 0, 1, 0), **kwargs)


def translate_y_rel(img, pct, **kwargs):
    pixels = pct * img.size[1]
    _check_args_tf(kwargs)
    return img.transform(img.size, Image.AFFINE, (1, 0, 0, 0, 1, pixels), **kwargs)


def translate_x_abs(img, pixels, **kwargs):
    _check_args_tf(kwargs)
    return img.transform(img.size, Image.AFFINE, (1, 0, pixels, 0, 1, 0), **kwargs)


def translate_y_abs(img, pixels, **kwargs):
    _check_args_tf(kwargs)
    return img.transform(img.size, Image.AFFINE, (1, 0, 0, 0, 1, pixels), **kwargs)


def rotate(img, degrees, **kwargs):
    _check_args_tf(kwargs)
    if _PIL_VER >= (5, 2):
        return img.rotate(degrees, **kwargs)
    if _PIL_VER >= (5, 0):
        w, h = img.size
        post_trans = (0, 0)
        rotn_center = (w / 2.0, h / 2.0)
        angle = -math.radians(degrees)
        matrix = [
            round(math.cos(angle), 15),
            round(math.sin(angle), 15),
            0.0,
            round(-math.sin(angle), 15),
            round(math.cos(angle), 15),
            0.0,
        ]

        def transform(x, y, matrix):
            (a, b, c, d, e, f) = matrix
            return a * x + b * y + c, d * x + e * y + f

        matrix[2], matrix[5] = transform(
            -rotn_center[0] - post_trans[0], -rotn_center[1] - post_trans[1], matrix
        )
        matrix[2] += rotn_center[0]
        matrix[5] += rotn_center[1]
        return img.transform(img.size, Image.AFFINE, matrix, **kwargs)
    return img.rotate(degrees, resample=kwargs['resample'])


def auto_contrast(img, **__):
    return ImageOps.autocontrast(img)


def invert(img, **__):
    return ImageOps.invert(img)


def equalize(img, **__):
    return ImageOps.equalize(img)


def solarize(img, thresh, **__):
    return ImageOps.solarize(img, thresh)


def solarize_add(img, add, thresh=128, **__):
    lut = []
    for i in range(256):
        if i < thresh:
            lut.append(min(255, i + add))
        else:
            lut.append(i)

    if img.mode in ("L", "RGB"):
        if img.mode == "RGB" and len(lut) == 256:
            lut = lut + lut + lut
        return img.point(lut)

    return img


def posterize(img, bits_to_keep, **__):
    if bits_to_keep >= 8:
        return img
    return ImageOps.posterize(img, bits_to_keep)


def contrast(img, factor, **__):
    return ImageEnhance.Contrast(img).enhance(factor)


def color(img, factor, **__):
    return ImageEnhance.Color(img).enhance(factor)


def brightness(img, factor, **__):
    return ImageEnhance.Brightness(img).enhance(factor)


def sharpness(img, factor, **__):
    return ImageEnhance.Sharpness(img).enhance(factor)


def gaussian_blur(img, factor, **__):
    img = img.filter(ImageFilter.GaussianBlur(radius=factor))
    return img


def gaussian_blur_rand(img, factor, **__):
    radius_min = 0.1
    radius_max = 2.0
    img = img.filter(ImageFilter.GaussianBlur(radius=random.uniform(radius_min, radius_max * factor)))
    return img


def desaturate(img, factor, **_):
    factor = min(1., max(0., 1. - factor))
    # enhance factor 0 = grayscale, 1.0 = no-change
    return ImageEnhance.Color(img).enhance(factor)


def _randomly_negate(v):
    """With 50% prob, negate the value"""
    return -v if random.random() > 0.5 else v


def _rotate_level_to_arg(level, _hparams):
    # range [-30, 30]
    level = (level / _LEVEL_DENOM) * 30.
    level = _randomly_negate(level)
    return level,


def _enhance_level_to_arg(level, _hparams):
    # range [0.1, 1.9]
    return (level / _LEVEL_DENOM) * 1.8 + 0.1,


def _enhance_increasing_level_to_arg(level, _hparams):
    # the 'no change' level is 1.0, moving away from that towards 0. or 2.0 increases the enhancement blend
    # range [0.1, 1.9] if level <= _LEVEL_DENOM
    level = (level / _LEVEL_DENOM) * .9
    level = max(0.1, 1.0 + _randomly_negate(level))  # keep it >= 0.1
    return level,


def _minmax_level_to_arg(level, _hparams, min_val=0., max_val=1.0, clamp=True):
    level = (level / _LEVEL_DENOM)
    level = min_val + (max_val - min_val) * level
    if clamp:
        level = max(min_val, min(max_val, level))
    return level,


def _shear_level_to_arg(level, _hparams):
    # range [-0.3, 0.3]
    level = (level / _LEVEL_DENOM) * 0.3
    level = _randomly_negate(level)
    return level,


def _translate_abs_level_to_arg(level, hparams):
    translate_const = hparams['translate_const']
    level = (level / _LEVEL_DENOM) * float(translate_const)
    level = _randomly_negate(level)
    return level,


def _translate_rel_level_to_arg(level, hparams):
    # default range [-0.45, 0.45]
    translate_pct = hparams.get('translate_pct', 0.45)
    level = (level / _LEVEL_DENOM) * translate_pct
    level = _randomly_negate(level)
    return level,


def _posterize_level_to_arg(level, _hparams):
    # As per Tensorflow TPU EfficientNet impl
    # range [0, 4], 'keep 0 up to 4 MSB of original image'
    # intensity/severity of augmentation decreases with level
    return int((level / _LEVEL_DENOM) * 4),


def _posterize_increasing_level_to_arg(level, hparams):
    # As per Tensorflow models research and UDA impl
    # range [4, 0], 'keep 4 down to 0 MSB of original image',
    # intensity/severity of augmentation increases with level
    return 4 - _posterize_level_to_arg(level, hparams)[0],


def _posterize_original_level_to_arg(level, _hparams):
    # As per original AutoAugment paper description
    # range [4, 8], 'keep 4 up to 8 MSB of image'
    # intensity/severity of augmentation decreases with level
    return int((level / _LEVEL_DENOM) * 4) + 4,


def _solarize_level_to_arg(level, _hparams):
    # range [0, 256]
    # intensity/severity of augmentation decreases with level
    return min(256, int((level / _LEVEL_DENOM) * 256)),


def _solarize_increasing_level_to_arg(level, _hparams):
    # range [0, 256]
    # intensity/severity of augmentation increases with level
    return 256 - _solarize_level_to_arg(level, _hparams)[0],


def _solarize_add_level_to_arg(level, _hparams):
    # range [0, 110]
    return min(128, int((level / _LEVEL_DENOM) * 110)),

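For orientation, a few concrete values of the magnitude-to-argument mappings above, at M=9 on the default 0..10 scale (derived directly from the formulas; the sign flip comes from _randomly_negate):

    # _rotate_level_to_arg(9, {}):    (9 / 10) * 30  = 27.0 degrees, randomly negated
    # _shear_level_to_arg(9, {}):     (9 / 10) * 0.3 = 0.27, randomly negated
    # _solarize_level_to_arg(9, {}):  min(256, int((9 / 10) * 256)) = 230 (threshold)
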
LEVEL_TO_ARG = {
    'AutoContrast': None,
    'Equalize': None,
    'Invert': None,
    'Rotate': _rotate_level_to_arg,
    # There are several variations of the posterize level scaling in various Tensorflow/Google repositories/papers
    'Posterize': _posterize_level_to_arg,
    'PosterizeIncreasing': _posterize_increasing_level_to_arg,
    'PosterizeOriginal': _posterize_original_level_to_arg,
    'Solarize': _solarize_level_to_arg,
    'SolarizeIncreasing': _solarize_increasing_level_to_arg,
    'SolarizeAdd': _solarize_add_level_to_arg,
    'Color': _enhance_level_to_arg,
    'ColorIncreasing': _enhance_increasing_level_to_arg,
    'Contrast': _enhance_level_to_arg,
    'ContrastIncreasing': _enhance_increasing_level_to_arg,
    'Brightness': _enhance_level_to_arg,
    'BrightnessIncreasing': _enhance_increasing_level_to_arg,
    'Sharpness': _enhance_level_to_arg,
    'SharpnessIncreasing': _enhance_increasing_level_to_arg,
    'ShearX': _shear_level_to_arg,
    'ShearY': _shear_level_to_arg,
    'TranslateX': _translate_abs_level_to_arg,
    'TranslateY': _translate_abs_level_to_arg,
    'TranslateXRel': _translate_rel_level_to_arg,
    'TranslateYRel': _translate_rel_level_to_arg,
    'Desaturate': partial(_minmax_level_to_arg, min_val=0.5, max_val=1.0),
    'GaussianBlur': partial(_minmax_level_to_arg, min_val=0.1, max_val=2.0),
    'GaussianBlurRand': _minmax_level_to_arg,
}


NAME_TO_OP = {
    'AutoContrast': auto_contrast,
    'Equalize': equalize,
    'Invert': invert,
    'Rotate': rotate,
    'Posterize': posterize,
    'PosterizeIncreasing': posterize,
    'PosterizeOriginal': posterize,
    'Solarize': solarize,
    'SolarizeIncreasing': solarize,
    'SolarizeAdd': solarize_add,
    'Color': color,
    'ColorIncreasing': color,
    'Contrast': contrast,
    'ContrastIncreasing': contrast,
    'Brightness': brightness,
    'BrightnessIncreasing': brightness,
    'Sharpness': sharpness,
    'SharpnessIncreasing': sharpness,
    'ShearX': shear_x,
    'ShearY': shear_y,
    'TranslateX': translate_x_abs,
    'TranslateY': translate_y_abs,
    'TranslateXRel': translate_x_rel,
    'TranslateYRel': translate_y_rel,
    'Desaturate': desaturate,
    'GaussianBlur': gaussian_blur,
    'GaussianBlurRand': gaussian_blur_rand,
}


class AugmentOp:

    def __init__(self, name, prob=0.5, magnitude=10, hparams=None):
        hparams = hparams or _HPARAMS_DEFAULT
        self.name = name
        self.aug_fn = NAME_TO_OP[name]
        self.level_fn = LEVEL_TO_ARG[name]
        self.prob = prob
        self.magnitude = magnitude
        self.hparams = hparams.copy()
        self.kwargs = dict(
            fillcolor=hparams['img_mean'] if 'img_mean' in hparams else _FILL,
            resample=hparams['interpolation'] if 'interpolation' in hparams else _RANDOM_INTERPOLATION,
        )

        # If magnitude_std is > 0, we introduce some randomness
        # in the usually fixed policy and sample magnitude from a normal distribution
        # with mean `magnitude` and std-dev of `magnitude_std`.
        # NOTE This is my own hack, being tested, not in papers or reference impls.
        # If magnitude_std is inf, we sample magnitude from a uniform distribution
        self.magnitude_std = self.hparams.get('magnitude_std', 0)
        self.magnitude_max = self.hparams.get('magnitude_max', None)

    def __call__(self, img):
        if self.prob < 1.0 and random.random() > self.prob:
            return img
        magnitude = self.magnitude
        if self.magnitude_std > 0:
            # magnitude randomization enabled
            if self.magnitude_std == float('inf'):
                # inf == uniform sampling
                magnitude = random.uniform(0, magnitude)
            elif self.magnitude_std > 0:
                magnitude = random.gauss(magnitude, self.magnitude_std)
        # default upper_bound for the timm RA impl is _LEVEL_DENOM (10)
        # setting magnitude_max overrides this to allow M > 10 (behaviour closer to Google TF RA impl)
        upper_bound = self.magnitude_max or _LEVEL_DENOM
        magnitude = max(0., min(magnitude, upper_bound))
        level_args = self.level_fn(magnitude, self.hparams) if self.level_fn is not None else tuple()
        return self.aug_fn(img, *level_args, **self.kwargs)

    def __repr__(self):
        fs = self.__class__.__name__ + f'(name={self.name}, p={self.prob}'
        fs += f', m={self.magnitude}, mstd={self.magnitude_std}'
        if self.magnitude_max is not None:
            fs += f', mmax={self.magnitude_max}'
        fs += ')'
        return fs
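
A minimal usage sketch for a single op, assuming only the code above (the image here is a hypothetical stand-in):

    from PIL import Image

    img = Image.new('RGB', (224, 224))               # stand-in image
    op = AugmentOp('Rotate', prob=1.0, magnitude=9)  # always applied; M=9 -> up to +/-27 degrees
    out = op(img)
    print(op)  # AugmentOp(name=Rotate, p=1.0, m=9, mstd=0)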


def auto_augment_policy_v0(hparams):
    # ImageNet v0 policy from TPU EfficientNet impl, cannot find a paper reference.
    policy = [
        [('Equalize', 0.8, 1), ('ShearY', 0.8, 4)],
        [('Color', 0.4, 9), ('Equalize', 0.6, 3)],
        [('Color', 0.4, 1), ('Rotate', 0.6, 8)],
        [('Solarize', 0.8, 3), ('Equalize', 0.4, 7)],
        [('Solarize', 0.4, 2), ('Solarize', 0.6, 2)],
        [('Color', 0.2, 0), ('Equalize', 0.8, 8)],
        [('Equalize', 0.4, 8), ('SolarizeAdd', 0.8, 3)],
        [('ShearX', 0.2, 9), ('Rotate', 0.6, 8)],
        [('Color', 0.6, 1), ('Equalize', 1.0, 2)],
        [('Invert', 0.4, 9), ('Rotate', 0.6, 0)],
        [('Equalize', 1.0, 9), ('ShearY', 0.6, 3)],
        [('Color', 0.4, 7), ('Equalize', 0.6, 0)],
        [('Posterize', 0.4, 6), ('AutoContrast', 0.4, 7)],
        [('Solarize', 0.6, 8), ('Color', 0.6, 9)],
        [('Solarize', 0.2, 4), ('Rotate', 0.8, 9)],
        [('Rotate', 1.0, 7), ('TranslateYRel', 0.8, 9)],
        [('ShearX', 0.0, 0), ('Solarize', 0.8, 4)],
        [('ShearY', 0.8, 0), ('Color', 0.6, 4)],
        [('Color', 1.0, 0), ('Rotate', 0.6, 2)],
        [('Equalize', 0.8, 4), ('Equalize', 0.0, 8)],
        [('Equalize', 1.0, 4), ('AutoContrast', 0.6, 2)],
        [('ShearY', 0.4, 7), ('SolarizeAdd', 0.6, 7)],
        [('Posterize', 0.8, 2), ('Solarize', 0.6, 10)],  # This results in black image with Tpu posterize
        [('Solarize', 0.6, 8), ('Equalize', 0.6, 1)],
        [('Color', 0.8, 6), ('Rotate', 0.4, 5)],
    ]
    pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]
    return pc


def auto_augment_policy_v0r(hparams):
    # ImageNet v0 policy from TPU EfficientNet impl, with variation of Posterize used
    # in Google research implementation (number of bits discarded increases with magnitude)
    policy = [
        [('Equalize', 0.8, 1), ('ShearY', 0.8, 4)],
        [('Color', 0.4, 9), ('Equalize', 0.6, 3)],
        [('Color', 0.4, 1), ('Rotate', 0.6, 8)],
        [('Solarize', 0.8, 3), ('Equalize', 0.4, 7)],
        [('Solarize', 0.4, 2), ('Solarize', 0.6, 2)],
        [('Color', 0.2, 0), ('Equalize', 0.8, 8)],
        [('Equalize', 0.4, 8), ('SolarizeAdd', 0.8, 3)],
        [('ShearX', 0.2, 9), ('Rotate', 0.6, 8)],
        [('Color', 0.6, 1), ('Equalize', 1.0, 2)],
        [('Invert', 0.4, 9), ('Rotate', 0.6, 0)],
        [('Equalize', 1.0, 9), ('ShearY', 0.6, 3)],
        [('Color', 0.4, 7), ('Equalize', 0.6, 0)],
        [('PosterizeIncreasing', 0.4, 6), ('AutoContrast', 0.4, 7)],
        [('Solarize', 0.6, 8), ('Color', 0.6, 9)],
        [('Solarize', 0.2, 4), ('Rotate', 0.8, 9)],
        [('Rotate', 1.0, 7), ('TranslateYRel', 0.8, 9)],
        [('ShearX', 0.0, 0), ('Solarize', 0.8, 4)],
        [('ShearY', 0.8, 0), ('Color', 0.6, 4)],
        [('Color', 1.0, 0), ('Rotate', 0.6, 2)],
        [('Equalize', 0.8, 4), ('Equalize', 0.0, 8)],
        [('Equalize', 1.0, 4), ('AutoContrast', 0.6, 2)],
        [('ShearY', 0.4, 7), ('SolarizeAdd', 0.6, 7)],
        [('PosterizeIncreasing', 0.8, 2), ('Solarize', 0.6, 10)],
        [('Solarize', 0.6, 8), ('Equalize', 0.6, 1)],
        [('Color', 0.8, 6), ('Rotate', 0.4, 5)],
    ]
    pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]
    return pc


def auto_augment_policy_original(hparams):
    # ImageNet policy from https://arxiv.org/abs/1805.09501
    policy = [
        [('PosterizeOriginal', 0.4, 8), ('Rotate', 0.6, 9)],
        [('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)],
        [('Equalize', 0.8, 8), ('Equalize', 0.6, 3)],
        [('PosterizeOriginal', 0.6, 7), ('PosterizeOriginal', 0.6, 6)],
        [('Equalize', 0.4, 7), ('Solarize', 0.2, 4)],
        [('Equalize', 0.4, 4), ('Rotate', 0.8, 8)],
        [('Solarize', 0.6, 3), ('Equalize', 0.6, 7)],
        [('PosterizeOriginal', 0.8, 5), ('Equalize', 1.0, 2)],
        [('Rotate', 0.2, 3), ('Solarize', 0.6, 8)],
        [('Equalize', 0.6, 8), ('PosterizeOriginal', 0.4, 6)],
        [('Rotate', 0.8, 8), ('Color', 0.4, 0)],
        [('Rotate', 0.4, 9), ('Equalize', 0.6, 2)],
        [('Equalize', 0.0, 7), ('Equalize', 0.8, 8)],
        [('Invert', 0.6, 4), ('Equalize', 1.0, 8)],
        [('Color', 0.6, 4), ('Contrast', 1.0, 8)],
        [('Rotate', 0.8, 8), ('Color', 1.0, 2)],
        [('Color', 0.8, 8), ('Solarize', 0.8, 7)],
        [('Sharpness', 0.4, 7), ('Invert', 0.6, 8)],
        [('ShearX', 0.6, 5), ('Equalize', 1.0, 9)],
        [('Color', 0.4, 0), ('Equalize', 0.6, 3)],
        [('Equalize', 0.4, 7), ('Solarize', 0.2, 4)],
        [('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)],
        [('Invert', 0.6, 4), ('Equalize', 1.0, 8)],
        [('Color', 0.6, 4), ('Contrast', 1.0, 8)],
        [('Equalize', 0.8, 8), ('Equalize', 0.6, 3)],
    ]
    pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]
    return pc


def auto_augment_policy_originalr(hparams):
    # ImageNet policy from https://arxiv.org/abs/1805.09501 with research posterize variation
    policy = [
        [('PosterizeIncreasing', 0.4, 8), ('Rotate', 0.6, 9)],
        [('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)],
        [('Equalize', 0.8, 8), ('Equalize', 0.6, 3)],
        [('PosterizeIncreasing', 0.6, 7), ('PosterizeIncreasing', 0.6, 6)],
        [('Equalize', 0.4, 7), ('Solarize', 0.2, 4)],
        [('Equalize', 0.4, 4), ('Rotate', 0.8, 8)],
        [('Solarize', 0.6, 3), ('Equalize', 0.6, 7)],
        [('PosterizeIncreasing', 0.8, 5), ('Equalize', 1.0, 2)],
        [('Rotate', 0.2, 3), ('Solarize', 0.6, 8)],
        [('Equalize', 0.6, 8), ('PosterizeIncreasing', 0.4, 6)],
        [('Rotate', 0.8, 8), ('Color', 0.4, 0)],
        [('Rotate', 0.4, 9), ('Equalize', 0.6, 2)],
        [('Equalize', 0.0, 7), ('Equalize', 0.8, 8)],
        [('Invert', 0.6, 4), ('Equalize', 1.0, 8)],
        [('Color', 0.6, 4), ('Contrast', 1.0, 8)],
        [('Rotate', 0.8, 8), ('Color', 1.0, 2)],
        [('Color', 0.8, 8), ('Solarize', 0.8, 7)],
        [('Sharpness', 0.4, 7), ('Invert', 0.6, 8)],
        [('ShearX', 0.6, 5), ('Equalize', 1.0, 9)],
        [('Color', 0.4, 0), ('Equalize', 0.6, 3)],
        [('Equalize', 0.4, 7), ('Solarize', 0.2, 4)],
        [('Solarize', 0.6, 5), ('AutoContrast', 0.6, 5)],
        [('Invert', 0.6, 4), ('Equalize', 1.0, 8)],
        [('Color', 0.6, 4), ('Contrast', 1.0, 8)],
        [('Equalize', 0.8, 8), ('Equalize', 0.6, 3)],
    ]
    pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]
    return pc


def auto_augment_policy_3a(hparams):
    policy = [
        [('Solarize', 1.0, 5)],  # 128 solarize threshold @ 5 magnitude
        [('Desaturate', 1.0, 10)],  # grayscale at 10 magnitude
        [('GaussianBlurRand', 1.0, 10)],
    ]
    pc = [[AugmentOp(*a, hparams=hparams) for a in sp] for sp in policy]
    return pc


def auto_augment_policy(name='v0', hparams=None):
    hparams = hparams or _HPARAMS_DEFAULT
    if name == 'original':
        return auto_augment_policy_original(hparams)
    if name == 'originalr':
        return auto_augment_policy_originalr(hparams)
    if name == 'v0':
        return auto_augment_policy_v0(hparams)
    if name == 'v0r':
        return auto_augment_policy_v0r(hparams)
    if name == '3a':
        return auto_augment_policy_3a(hparams)
    assert False, f'Unknown AA policy {name}'


class AutoAugment:

    def __init__(self, policy):
        self.policy = policy

    def __call__(self, img):
        sub_policy = random.choice(self.policy)
        for op in sub_policy:
            img = op(img)
        return img

    def __repr__(self):
        fs = self.__class__.__name__ + '(policy='
        for p in self.policy:
            fs += '\n\t['
            fs += ', '.join([str(op) for op in p])
            fs += ']'
        fs += ')'
        return fs


def auto_augment_transform(config_str: str, hparams: Optional[Dict] = None):
    """ Create a AutoAugment transform

    Args:
        config_str: String defining configuration of auto augmentation. Consists of multiple sections separated by
            dashes ('-').
            The first section defines the AutoAugment policy (one of 'v0', 'v0r', 'original', 'originalr').
            While the remaining sections define other arguments
                * 'mstd' - float std deviation of magnitude noise applied
        hparams: Other hparams (kwargs) for the AutoAugmentation scheme

    Returns:
        A PyTorch compatible Transform

    Examples::

        'original-mstd0.5' results in AutoAugment with original policy, magnitude_std 0.5
    """
    config = config_str.split('-')
    policy_name = config[0]
    config = config[1:]
    for c in config:
        cs = re.split(r'(\d.*)', c)
        if len(cs) < 2:
            continue
        key, val = cs[:2]
        if key == 'mstd':
            # noise param injected via hparams for now
            hparams.setdefault('magnitude_std', float(val))
        else:
            assert False, 'Unknown AutoAugment config section'
    aa_policy = auto_augment_policy(policy_name, hparams=hparams)
    return AutoAugment(aa_policy)

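A minimal sketch of building the AutoAugment transform from a config string (the 'original-mstd0.5' string is the docstring example; note that hparams must be passed as a dict here, since the 'mstd' section is written into it via setdefault):

    hparams = dict(translate_const=250, img_mean=(128, 128, 128))
    aa = auto_augment_transform('original-mstd0.5', hparams=hparams)
    img_aug = aa(img)  # img is a PIL.Image; one random sub-policy is applied
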

_RAND_TRANSFORMS = [
    'AutoContrast',
    'Equalize',
    'Invert',
    'Rotate',
    'Posterize',
    'Solarize',
    'SolarizeAdd',
    'Color',
    'Contrast',
    'Brightness',
    'Sharpness',
    'ShearX',
    'ShearY',
    'TranslateXRel',
    'TranslateYRel',
    # 'Cutout'  # NOTE I've implement this as random erasing separately
]


_RAND_INCREASING_TRANSFORMS = [
    'AutoContrast',
    'Equalize',
    'Invert',
    'Rotate',
    'PosterizeIncreasing',
    'SolarizeIncreasing',
    'SolarizeAdd',
    'ColorIncreasing',
    'ContrastIncreasing',
    'BrightnessIncreasing',
    'SharpnessIncreasing',
    'ShearX',
    'ShearY',
    'TranslateXRel',
    'TranslateYRel',
    # 'Cutout'  # NOTE I've implement this as random erasing separately
]


_RAND_3A = [
    'SolarizeIncreasing',
    'Desaturate',
    'GaussianBlur',
]


_RAND_WEIGHTED_3A = {
    'SolarizeIncreasing': 6,
    'Desaturate': 6,
    'GaussianBlur': 6,
    'Rotate': 3,
    'ShearX': 2,
    'ShearY': 2,
    'PosterizeIncreasing': 1,
    'AutoContrast': 1,
    'ColorIncreasing': 1,
    'SharpnessIncreasing': 1,
    'ContrastIncreasing': 1,
    'BrightnessIncreasing': 1,
    'Equalize': 1,
    'Invert': 1,
}


# These experimental weights are based loosely on the relative improvements mentioned in paper.
# They may not result in increased performance, but could likely be tuned to so.
_RAND_WEIGHTED_0 = {
    'Rotate': 3,
    'ShearX': 2,
    'ShearY': 2,
    'TranslateXRel': 1,
    'TranslateYRel': 1,
    'ColorIncreasing': .25,
    'SharpnessIncreasing': 0.25,
    'AutoContrast': 0.25,
    'SolarizeIncreasing': .05,
    'SolarizeAdd': .05,
    'ContrastIncreasing': .05,
    'BrightnessIncreasing': .05,
    'Equalize': .05,
    'PosterizeIncreasing': 0.05,
    'Invert': 0.05,
}


def _get_weighted_transforms(transforms: Dict):
    transforms, probs = list(zip(*transforms.items()))
    probs = np.array(probs)
    probs = probs / np.sum(probs)
    return transforms, probs


def rand_augment_choices(name: str, increasing=True):
    if name == 'weights':
        return _RAND_WEIGHTED_0
    if name == '3aw':
        return _RAND_WEIGHTED_3A
    if name == '3a':
        return _RAND_3A
    return _RAND_INCREASING_TRANSFORMS if increasing else _RAND_TRANSFORMS


def rand_augment_ops(
        magnitude: Union[int, float] = 10,
        prob: float = 0.5,
        hparams: Optional[Dict] = None,
        transforms: Optional[Union[Dict, List]] = None,
):
    hparams = hparams or _HPARAMS_DEFAULT
    transforms = transforms or _RAND_TRANSFORMS
    return [AugmentOp(
        name, prob=prob, magnitude=magnitude, hparams=hparams) for name in transforms]


class RandAugment:
    def __init__(self, ops, num_layers=2, choice_weights=None):
        self.ops = ops
        self.num_layers = num_layers
        self.choice_weights = choice_weights

    def __call__(self, img):
        # no replacement when using weighted choice
        ops = np.random.choice(
            self.ops,
            self.num_layers,
            replace=self.choice_weights is None,
            p=self.choice_weights,
        )
        for op in ops:
            img = op(img)
        return img

    def __repr__(self):
        fs = self.__class__.__name__ + f'(n={self.num_layers}, ops='
        for op in self.ops:
            fs += f'\n\t{op}'
        fs += ')'
        return fs

761 |
+
|
762 |
+
def rand_augment_transform(
|
763 |
+
config_str: str,
|
764 |
+
hparams: Optional[Dict] = None,
|
765 |
+
transforms: Optional[Union[str, Dict, List]] = None,
|
766 |
+
):
|
767 |
+
""" Create a RandAugment transform
|
768 |
+
|
769 |
+
Args:
|
770 |
+
config_str (str): String defining configuration of random augmentation. Consists of multiple sections separated
|
771 |
+
by dashes ('-'). The first section defines the specific variant of rand augment (currently only 'rand').
|
772 |
+
The remaining sections, not order specific determine
|
773 |
+
* 'm' - integer magnitude of rand augment
|
774 |
+
* 'n' - integer num layers (number of transform ops selected per image)
|
775 |
+
* 'p' - float probability of applying each layer (default 0.5)
|
776 |
+
* 'mstd' - float std deviation of magnitude noise applied, or uniform sampling if infinity (or > 100)
|
777 |
+
* 'mmax' - set upper bound for magnitude to something other than default of _LEVEL_DENOM (10)
|
778 |
+
* 'inc' - integer (bool), use augmentations that increase in severity with magnitude (default: 0)
|
779 |
+
* 't' - str name of transform set to use
|
780 |
+
hparams (dict): Other hparams (kwargs) for the RandAugmentation scheme
|
781 |
+
|
782 |
+
Returns:
|
783 |
+
A PyTorch compatible Transform
|
784 |
+
|
785 |
+
Examples::
|
786 |
+
|
787 |
+
'rand-m9-n3-mstd0.5' results in RandAugment with magnitude 9, num_layers 3, magnitude_std 0.5
|
788 |
+
|
789 |
+
'rand-mstd1-tweights' results in mag std 1.0, weighted transforms, default mag of 10 and num_layers 2
|
790 |
+
|
791 |
+
"""
|
792 |
+
magnitude = _LEVEL_DENOM # default to _LEVEL_DENOM for magnitude (currently 10)
|
793 |
+
num_layers = 2 # default to 2 ops per image
|
794 |
+
increasing = False
|
795 |
+
prob = 0.5
|
796 |
+
config = config_str.split('-')
|
797 |
+
assert config[0] == 'rand'
|
798 |
+
config = config[1:]
|
799 |
+
for c in config:
|
800 |
+
if c.startswith('t'):
|
801 |
+
# NOTE old 'w' key was removed, 'w0' is not equivalent to 'tweights'
|
802 |
+
val = str(c[1:])
|
803 |
+
if transforms is None:
|
804 |
+
transforms = val
|
805 |
+
else:
|
806 |
+
# numeric options
|
807 |
+
cs = re.split(r'(\d.*)', c)
|
808 |
+
if len(cs) < 2:
|
809 |
+
continue
|
810 |
+
key, val = cs[:2]
|
811 |
+
if key == 'mstd':
|
812 |
+
# noise param / randomization of magnitude values
|
813 |
+
mstd = float(val)
|
814 |
+
if mstd > 100:
|
815 |
+
# use uniform sampling in 0 to magnitude if mstd is > 100
|
816 |
+
mstd = float('inf')
|
817 |
+
hparams.setdefault('magnitude_std', mstd)
|
818 |
+
elif key == 'mmax':
|
819 |
+
# clip magnitude between [0, mmax] instead of default [0, _LEVEL_DENOM]
|
820 |
+
hparams.setdefault('magnitude_max', int(val))
|
821 |
+
elif key == 'inc':
|
822 |
+
if bool(val):
|
823 |
+
increasing = True
|
824 |
+
elif key == 'm':
|
825 |
+
magnitude = int(val)
|
826 |
+
elif key == 'n':
|
827 |
+
num_layers = int(val)
|
828 |
+
elif key == 'p':
|
829 |
+
prob = float(val)
|
830 |
+
else:
|
831 |
+
assert False, 'Unknown RandAugment config section'
|
832 |
+
|
833 |
+
if isinstance(transforms, str):
|
834 |
+
transforms = rand_augment_choices(transforms, increasing=increasing)
|
835 |
+
elif transforms is None:
|
836 |
+
transforms = _RAND_INCREASING_TRANSFORMS if increasing else _RAND_TRANSFORMS
|
837 |
+
|
838 |
+
choice_weights = None
|
839 |
+
if isinstance(transforms, Dict):
|
840 |
+
transforms, choice_weights = _get_weighted_transforms(transforms)
|
841 |
+
|
842 |
+
ra_ops = rand_augment_ops(magnitude=magnitude, prob=prob, hparams=hparams, transforms=transforms)
|
843 |
+
return RandAugment(ra_ops, num_layers, choice_weights=choice_weights)
|
844 |
+
|
845 |
+
|
846 |
+
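For reference, a minimal usage sketch (not part of the diff) of the config-string interface documented above; the image path is hypothetical:

from PIL import Image
from timm.data.auto_augment import rand_augment_transform

tfm = rand_augment_transform(
    'rand-m9-n3-mstd0.5',                   # magnitude 9, 3 layers, magnitude_std 0.5
    hparams={'img_mean': (124, 116, 104)},  # optional fill color for geometric ops
)
img = Image.open('example.jpg').convert('RGB')  # hypothetical input image
augmented = tfm(img)  # returns an augmented PIL image
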
_AUGMIX_TRANSFORMS = [
    'AutoContrast',
    'ColorIncreasing',  # not in paper
    'ContrastIncreasing',  # not in paper
    'BrightnessIncreasing',  # not in paper
    'SharpnessIncreasing',  # not in paper
    'Equalize',
    'Rotate',
    'PosterizeIncreasing',
    'SolarizeIncreasing',
    'ShearX',
    'ShearY',
    'TranslateXRel',
    'TranslateYRel',
]


def augmix_ops(
        magnitude: Union[int, float] = 10,
        hparams: Optional[Dict] = None,
        transforms: Optional[Union[str, Dict, List]] = None,
):
    hparams = hparams or _HPARAMS_DEFAULT
    transforms = transforms or _AUGMIX_TRANSFORMS
    return [AugmentOp(
        name,
        prob=1.0,
        magnitude=magnitude,
        hparams=hparams
    ) for name in transforms]


class AugMixAugment:
    """ AugMix Transform
    Adapted and improved from impl here: https://github.com/google-research/augmix/blob/master/imagenet.py
    From paper: 'AugMix: A Simple Data Processing Method to Improve Robustness and Uncertainty' -
    https://arxiv.org/abs/1912.02781
    """
    def __init__(self, ops, alpha=1., width=3, depth=-1, blended=False):
        self.ops = ops
        self.alpha = alpha
        self.width = width
        self.depth = depth
        self.blended = blended  # blended mode is faster but not well tested

    def _calc_blended_weights(self, ws, m):
        ws = ws * m
        cump = 1.
        rws = []
        for w in ws[::-1]:
            alpha = w / cump
            cump *= (1 - alpha)
            rws.append(alpha)
        return np.array(rws[::-1], dtype=np.float32)

    def _apply_blended(self, img, mixing_weights, m):
        # This is my first crack at implementing a slightly faster mixed augmentation. Instead
        # of accumulating the mix for each chain in a Numpy array and then blending with original,
        # it recomputes the blending coefficients and applies one PIL image blend per chain.
        # TODO the results appear in the right ballpark but they differ by more than rounding.
        img_orig = img.copy()
        ws = self._calc_blended_weights(mixing_weights, m)
        for w in ws:
            depth = self.depth if self.depth > 0 else np.random.randint(1, 4)
            ops = np.random.choice(self.ops, depth, replace=True)
            img_aug = img_orig  # no ops are in-place, deep copy not necessary
            for op in ops:
                img_aug = op(img_aug)
            img = Image.blend(img, img_aug, w)
        return img

    def _apply_basic(self, img, mixing_weights, m):
        # This is a literal adaptation of the paper/official implementation without normalizations and
        # PIL <-> Numpy conversions between every op. It is still quite CPU compute heavy compared to the
        # typical augmentation transforms, could use a GPU / Kornia implementation.
        img_shape = img.size[0], img.size[1], len(img.getbands())
        mixed = np.zeros(img_shape, dtype=np.float32)
        for mw in mixing_weights:
            depth = self.depth if self.depth > 0 else np.random.randint(1, 4)
            ops = np.random.choice(self.ops, depth, replace=True)
            img_aug = img  # no ops are in-place, deep copy not necessary
            for op in ops:
                img_aug = op(img_aug)
            mixed += mw * np.asarray(img_aug, dtype=np.float32)
        np.clip(mixed, 0, 255., out=mixed)
        mixed = Image.fromarray(mixed.astype(np.uint8))
        return Image.blend(img, mixed, m)

    def __call__(self, img):
        mixing_weights = np.float32(np.random.dirichlet([self.alpha] * self.width))
        m = np.float32(np.random.beta(self.alpha, self.alpha))
        if self.blended:
            mixed = self._apply_blended(img, mixing_weights, m)
        else:
            mixed = self._apply_basic(img, mixing_weights, m)
        return mixed

    def __repr__(self):
        fs = self.__class__.__name__ + f'(alpha={self.alpha}, width={self.width}, depth={self.depth}, ops='
        for op in self.ops:
            fs += f'\n\t{op}'
        fs += ')'
        return fs


def augment_and_mix_transform(config_str: str, hparams: Optional[Dict] = None):
    """ Create AugMix PyTorch transform

    Args:
        config_str (str): String defining configuration of random augmentation. Consists of multiple sections
            separated by dashes ('-'). The first section defines the specific variant (currently only 'augmix').
            The remaining sections (order not significant) determine:
                'm' - integer magnitude (severity) of augmentation mix (default: 3)
                'w' - integer width of augmentation chain (default: 3)
                'd' - integer depth of augmentation chain (-1 is random [1, 3], default: -1)
                'b' - integer (bool), blend each branch of chain into end result without a final blend, less CPU (default: 0)
                'mstd' - float std deviation of magnitude noise applied (default: 0)
            Ex 'augmix-m5-w4-d2' results in AugMix with severity 5, chain width 4, chain depth 2

        hparams: Other hparams (kwargs) for the Augmentation transforms

    Returns:
        A PyTorch compatible Transform
    """
    magnitude = 3
    width = 3
    depth = -1
    alpha = 1.
    blended = False
    hparams = hparams or {}  # guard against None default, setdefault() is used below
    config = config_str.split('-')
    assert config[0] == 'augmix'
    config = config[1:]
    for c in config:
        cs = re.split(r'(\d.*)', c)
        if len(cs) < 2:
            continue
        key, val = cs[:2]
        if key == 'mstd':
            # noise param injected via hparams for now
            hparams.setdefault('magnitude_std', float(val))
        elif key == 'm':
            magnitude = int(val)
        elif key == 'w':
            width = int(val)
        elif key == 'd':
            depth = int(val)
        elif key == 'a':
            alpha = float(val)
        elif key == 'b':
            blended = bool(val)
        else:
            assert False, 'Unknown AugMix config section'
    hparams.setdefault('magnitude_std', float('inf'))  # default to uniform sampling (if not set via mstd arg)
    ops = augmix_ops(magnitude=magnitude, hparams=hparams)
    return AugMixAugment(ops, alpha=alpha, width=width, depth=depth, blended=blended)
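An equivalent sketch for the AugMix transform above (not part of the diff, image path hypothetical):

from PIL import Image
from timm.data.auto_augment import augment_and_mix_transform

# 'augmix-m5-w4-d2': severity 5, chain width 4, chain depth 2 (see docstring above)
tfm = augment_and_mix_transform('augmix-m5-w4-d2', hparams={})
mixed = tfm(Image.open('example.jpg').convert('RGB'))  # hypothetical input image
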
pytorch-image-models/timm/data/config.py
ADDED
@@ -0,0 +1,129 @@
import logging
from .constants import *


_logger = logging.getLogger(__name__)


def resolve_data_config(
        args=None,
        pretrained_cfg=None,
        model=None,
        use_test_size=False,
        verbose=False
):
    assert model or args or pretrained_cfg, "At least one of model, args, or pretrained_cfg required for data config."
    args = args or {}
    pretrained_cfg = pretrained_cfg or {}
    if not pretrained_cfg and model is not None and hasattr(model, 'pretrained_cfg'):
        pretrained_cfg = model.pretrained_cfg
    data_config = {}

    # Resolve input/image size
    in_chans = 3
    if args.get('in_chans', None) is not None:
        in_chans = args['in_chans']
    elif args.get('chans', None) is not None:
        in_chans = args['chans']

    input_size = (in_chans, 224, 224)
    if args.get('input_size', None) is not None:
        assert isinstance(args['input_size'], (tuple, list))
        assert len(args['input_size']) == 3
        input_size = tuple(args['input_size'])
        in_chans = input_size[0]  # input_size overrides in_chans
    elif args.get('img_size', None) is not None:
        assert isinstance(args['img_size'], int)
        input_size = (in_chans, args['img_size'], args['img_size'])
    else:
        if use_test_size and pretrained_cfg.get('test_input_size', None) is not None:
            input_size = pretrained_cfg['test_input_size']
        elif pretrained_cfg.get('input_size', None) is not None:
            input_size = pretrained_cfg['input_size']
    data_config['input_size'] = input_size

    # resolve interpolation method
    data_config['interpolation'] = 'bicubic'
    if args.get('interpolation', None):
        data_config['interpolation'] = args['interpolation']
    elif pretrained_cfg.get('interpolation', None):
        data_config['interpolation'] = pretrained_cfg['interpolation']

    # resolve dataset + model mean for normalization
    data_config['mean'] = IMAGENET_DEFAULT_MEAN
    if args.get('mean', None) is not None:
        mean = tuple(args['mean'])
        if len(mean) == 1:
            mean = tuple(list(mean) * in_chans)
        else:
            assert len(mean) == in_chans
        data_config['mean'] = mean
    elif pretrained_cfg.get('mean', None):
        data_config['mean'] = pretrained_cfg['mean']

    # resolve dataset + model std deviation for normalization
    data_config['std'] = IMAGENET_DEFAULT_STD
    if args.get('std', None) is not None:
        std = tuple(args['std'])
        if len(std) == 1:
            std = tuple(list(std) * in_chans)
        else:
            assert len(std) == in_chans
        data_config['std'] = std
    elif pretrained_cfg.get('std', None):
        data_config['std'] = pretrained_cfg['std']

    # resolve default inference crop
    crop_pct = DEFAULT_CROP_PCT
    if args.get('crop_pct', None):
        crop_pct = args['crop_pct']
    else:
        if use_test_size and pretrained_cfg.get('test_crop_pct', None):
            crop_pct = pretrained_cfg['test_crop_pct']
        elif pretrained_cfg.get('crop_pct', None):
            crop_pct = pretrained_cfg['crop_pct']
    data_config['crop_pct'] = crop_pct

    # resolve default crop mode
    crop_mode = DEFAULT_CROP_MODE
    if args.get('crop_mode', None):
        crop_mode = args['crop_mode']
    elif pretrained_cfg.get('crop_mode', None):
        crop_mode = pretrained_cfg['crop_mode']
    data_config['crop_mode'] = crop_mode

    if verbose:
        _logger.info('Data processing configuration for current model + dataset:')
        for n, v in data_config.items():
            _logger.info('\t%s: %s' % (n, str(v)))

    return data_config


def resolve_model_data_config(
        model,
        args=None,
        pretrained_cfg=None,
        use_test_size=False,
        verbose=False,
):
    """ Resolve Model Data Config
    This is equivalent to resolve_data_config() but with arguments re-ordered to put model first.

    Args:
        model (nn.Module): the model instance
        args (dict): command line arguments / configuration in dict form (overrides pretrained_cfg)
        pretrained_cfg (dict): pretrained model config (overrides pretrained_cfg attached to model)
        use_test_size (bool): use the test time input resolution (if one exists) instead of default train resolution
        verbose (bool): enable extra logging of resolved values

    Returns:
        dictionary of config
    """
    return resolve_data_config(
        args=args,
        pretrained_cfg=pretrained_cfg,
        model=model,
        use_test_size=use_test_size,
        verbose=verbose,
    )
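A short sketch (not part of the diff) of deriving data settings from a model's pretrained config; the model name is chosen arbitrarily:

import timm
from timm.data import resolve_model_data_config

model = timm.create_model('resnet50', pretrained=False)
cfg = resolve_model_data_config(model)
# e.g. {'input_size': (3, 224, 224), 'interpolation': 'bicubic', 'mean': ..., 'std': ..., 'crop_pct': ..., 'crop_mode': ...}
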
pytorch-image-models/timm/data/constants.py
ADDED
@@ -0,0 +1,10 @@
DEFAULT_CROP_PCT = 0.875
DEFAULT_CROP_MODE = 'center'
IMAGENET_DEFAULT_MEAN = (0.485, 0.456, 0.406)
IMAGENET_DEFAULT_STD = (0.229, 0.224, 0.225)
IMAGENET_INCEPTION_MEAN = (0.5, 0.5, 0.5)
IMAGENET_INCEPTION_STD = (0.5, 0.5, 0.5)
IMAGENET_DPN_MEAN = (124 / 255, 117 / 255, 104 / 255)
IMAGENET_DPN_STD = tuple([1 / (.0167 * 255)] * 3)
OPENAI_CLIP_MEAN = (0.48145466, 0.4578275, 0.40821073)
OPENAI_CLIP_STD = (0.26862954, 0.26130258, 0.27577711)
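These constants feed the normalization stages elsewhere in timm.data; as a sketch, the default ImageNet statistics plug directly into a standard torchvision pipeline:

from torchvision import transforms
from timm.data.constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD

preprocess = transforms.Compose([
    transforms.ToTensor(),  # PIL image -> float tensor in [0, 1]
    transforms.Normalize(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
])
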
pytorch-image-models/timm/data/dataset.py
ADDED
@@ -0,0 +1,204 @@
""" Quick n Simple Image Folder, Tarfile based DataSet

Hacked together by / Copyright 2019, Ross Wightman
"""
import io
import logging
from typing import Optional

import torch
import torch.utils.data as data
from PIL import Image

from .readers import create_reader

_logger = logging.getLogger(__name__)


_ERROR_RETRY = 50


class ImageDataset(data.Dataset):

    def __init__(
            self,
            root,
            reader=None,
            split='train',
            class_map=None,
            load_bytes=False,
            input_img_mode='RGB',
            transform=None,
            target_transform=None,
            **kwargs,
    ):
        if reader is None or isinstance(reader, str):
            reader = create_reader(
                reader or '',
                root=root,
                split=split,
                class_map=class_map,
                **kwargs,
            )
        self.reader = reader
        self.load_bytes = load_bytes
        self.input_img_mode = input_img_mode
        self.transform = transform
        self.target_transform = target_transform
        self._consecutive_errors = 0

    def __getitem__(self, index):
        img, target = self.reader[index]

        try:
            img = img.read() if self.load_bytes else Image.open(img)
        except Exception as e:
            _logger.warning(f'Skipped sample (index {index}, file {self.reader.filename(index)}). {str(e)}')
            self._consecutive_errors += 1
            if self._consecutive_errors < _ERROR_RETRY:
                return self.__getitem__((index + 1) % len(self.reader))
            else:
                raise e
        self._consecutive_errors = 0

        if self.input_img_mode and not self.load_bytes:
            img = img.convert(self.input_img_mode)
        if self.transform is not None:
            img = self.transform(img)

        if target is None:
            target = -1
        elif self.target_transform is not None:
            target = self.target_transform(target)

        return img, target

    def __len__(self):
        return len(self.reader)

    def filename(self, index, basename=False, absolute=False):
        return self.reader.filename(index, basename, absolute)

    def filenames(self, basename=False, absolute=False):
        return self.reader.filenames(basename, absolute)


class IterableImageDataset(data.IterableDataset):

    def __init__(
            self,
            root,
            reader=None,
            split='train',
            class_map=None,
            is_training=False,
            batch_size=1,
            num_samples=None,
            seed=42,
            repeats=0,
            download=False,
            input_img_mode='RGB',
            input_key=None,
            target_key=None,
            transform=None,
            target_transform=None,
            max_steps=None,
            **kwargs,
    ):
        assert reader is not None
        if isinstance(reader, str):
            self.reader = create_reader(
                reader,
                root=root,
                split=split,
                class_map=class_map,
                is_training=is_training,
                batch_size=batch_size,
                num_samples=num_samples,
                seed=seed,
                repeats=repeats,
                download=download,
                input_img_mode=input_img_mode,
                input_key=input_key,
                target_key=target_key,
                max_steps=max_steps,
                **kwargs,
            )
        else:
            self.reader = reader
        self.transform = transform
        self.target_transform = target_transform
        self._consecutive_errors = 0

    def __iter__(self):
        for img, target in self.reader:
            if self.transform is not None:
                img = self.transform(img)
            if self.target_transform is not None:
                target = self.target_transform(target)
            yield img, target

    def __len__(self):
        if hasattr(self.reader, '__len__'):
            return len(self.reader)
        else:
            return 0

    def set_epoch(self, count):
        # TFDS and WDS need external epoch count for deterministic cross process shuffle
        if hasattr(self.reader, 'set_epoch'):
            self.reader.set_epoch(count)

    def set_loader_cfg(
            self,
            num_workers: Optional[int] = None,
    ):
        # TFDS and WDS readers need # workers for correct # samples estimate before loader processes created
        if hasattr(self.reader, 'set_loader_cfg'):
            self.reader.set_loader_cfg(num_workers=num_workers)

    def filename(self, index, basename=False, absolute=False):
        assert False, 'Filename lookup by index not supported, use filenames().'

    def filenames(self, basename=False, absolute=False):
        return self.reader.filenames(basename, absolute)


class AugMixDataset(torch.utils.data.Dataset):
    """Dataset wrapper to perform AugMix or other clean/augmentation mixes"""

    def __init__(self, dataset, num_splits=2):
        self.augmentation = None
        self.normalize = None
        self.dataset = dataset
        if self.dataset.transform is not None:
            self._set_transforms(self.dataset.transform)
        self.num_splits = num_splits

    def _set_transforms(self, x):
        assert isinstance(x, (list, tuple)) and len(x) == 3, 'Expecting a tuple/list of 3 transforms'
        self.dataset.transform = x[0]
        self.augmentation = x[1]
        self.normalize = x[2]

    @property
    def transform(self):
        return self.dataset.transform

    @transform.setter
    def transform(self, x):
        self._set_transforms(x)

    def _normalize(self, x):
        return x if self.normalize is None else self.normalize(x)

    def __getitem__(self, i):
        x, y = self.dataset[i]  # all splits share the same dataset base transform
        x_list = [self._normalize(x)]  # first split only normalizes (this is the 'clean' split)
        # run the full augmentation on the remaining splits
        for _ in range(self.num_splits - 1):
            x_list.append(self._normalize(self.augmentation(x)))
        return tuple(x_list), y

    def __len__(self):
        return len(self.dataset)
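A minimal usage sketch for ImageDataset (not part of the diff), assuming a conventional folder layout; the path is hypothetical:

from timm.data.dataset import ImageDataset

# hypothetical layout: /data/imagenet/val/<class_name>/<image>.jpeg
ds = ImageDataset('/data/imagenet/val')
img, target = ds[0]  # PIL image (RGB) and integer class index
print(ds.filename(0, basename=True), target)
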
pytorch-image-models/timm/data/dataset_factory.py
ADDED
@@ -0,0 +1,229 @@
""" Dataset Factory

Hacked together by / Copyright 2021, Ross Wightman
"""
import os
from typing import Optional

from torchvision.datasets import CIFAR100, CIFAR10, MNIST, KMNIST, FashionMNIST, ImageFolder
try:
    from torchvision.datasets import Places365
    has_places365 = True
except ImportError:
    has_places365 = False
try:
    from torchvision.datasets import INaturalist
    has_inaturalist = True
except ImportError:
    has_inaturalist = False
try:
    from torchvision.datasets import QMNIST
    has_qmnist = True
except ImportError:
    has_qmnist = False
try:
    from torchvision.datasets import ImageNet
    has_imagenet = True
except ImportError:
    has_imagenet = False

from .dataset import IterableImageDataset, ImageDataset

_TORCH_BASIC_DS = dict(
    cifar10=CIFAR10,
    cifar100=CIFAR100,
    mnist=MNIST,
    kmnist=KMNIST,
    fashion_mnist=FashionMNIST,
)
_TRAIN_SYNONYM = dict(train=None, training=None)
_EVAL_SYNONYM = dict(val=None, valid=None, validation=None, eval=None, evaluation=None)


def _search_split(root, split):
    # look for sub-folder with name of split in root and use that if it exists
    split_name = split.split('[')[0]
    try_root = os.path.join(root, split_name)
    if os.path.exists(try_root):
        return try_root

    def _try(syn):
        for s in syn:
            try_root = os.path.join(root, s)
            if os.path.exists(try_root):
                return try_root
        return root
    if split_name in _TRAIN_SYNONYM:
        root = _try(_TRAIN_SYNONYM)
    elif split_name in _EVAL_SYNONYM:
        root = _try(_EVAL_SYNONYM)
    return root


def create_dataset(
        name: str,
        root: Optional[str] = None,
        split: str = 'validation',
        search_split: bool = True,
        class_map: dict = None,
        load_bytes: bool = False,
        is_training: bool = False,
        download: bool = False,
        batch_size: int = 1,
        num_samples: Optional[int] = None,
        seed: int = 42,
        repeats: int = 0,
        input_img_mode: str = 'RGB',
        trust_remote_code: bool = False,
        **kwargs,
):
    """ Dataset factory method

    In parentheses after each arg are the types of dataset supported for each arg, one of:
      * Folder - default, timm folder (or tar) based ImageDataset
      * Torch - torchvision based datasets
      * HFDS - Hugging Face Datasets
      * HFIDS - Hugging Face Datasets Iterable (streaming mode, with IterableDataset)
      * TFDS - Tensorflow-datasets wrapper in IterableDataset interface via IterableImageDataset
      * WDS - Webdataset
      * All - any of the above

    Args:
        name: Dataset name, empty is okay for folder based datasets
        root: Root folder of dataset (All)
        split: Dataset split (All)
        search_split: Search for split specific child folder from root so one can specify
            `imagenet/` instead of `/imagenet/val`, etc on cmd line / config. (Folder, Torch)
        class_map: Specify class -> index mapping via text file or dict (Folder)
        load_bytes: Load data, return images as undecoded bytes (Folder)
        download: Download dataset if not present and supported (HFIDS, TFDS, Torch)
        is_training: Create dataset in train mode, this is different from the split.
            For Iterable / TFDS it enables shuffle, ignored for other datasets. (TFDS, WDS, HFIDS)
        batch_size: Batch size hint for iterable datasets (TFDS, WDS, HFIDS)
        seed: Seed for iterable datasets (TFDS, WDS, HFIDS)
        repeats: Dataset repeats per iteration i.e. epoch (TFDS, WDS, HFIDS)
        input_img_mode: Input image color conversion mode e.g. 'RGB', 'L' (folder, TFDS, WDS, HFDS, HFIDS)
        trust_remote_code: Trust remote code in Hugging Face Datasets if True (HFDS, HFIDS)
        **kwargs: Other args to pass through to underlying Dataset and/or Reader classes

    Returns:
        Dataset object
    """
    kwargs = {k: v for k, v in kwargs.items() if v is not None}
    name = name.lower()
    if name.startswith('torch/'):
        name = name.split('/', 2)[-1]
        torch_kwargs = dict(root=root, download=download, **kwargs)
        if name in _TORCH_BASIC_DS:
            ds_class = _TORCH_BASIC_DS[name]
            use_train = split in _TRAIN_SYNONYM
            ds = ds_class(train=use_train, **torch_kwargs)
        elif name == 'inaturalist' or name == 'inat':
            assert has_inaturalist, 'Please update to PyTorch 1.10, torchvision 0.11+ for Inaturalist'
            target_type = 'full'
            split_split = split.split('/')
            if len(split_split) > 1:
                target_type = split_split[0].split('_')
                if len(target_type) == 1:
                    target_type = target_type[0]
                split = split_split[-1]
            if split in _TRAIN_SYNONYM:
                split = '2021_train'
            elif split in _EVAL_SYNONYM:
                split = '2021_valid'
            ds = INaturalist(version=split, target_type=target_type, **torch_kwargs)
        elif name == 'places365':
            assert has_places365, 'Please update to a newer PyTorch and torchvision for Places365 dataset.'
            if split in _TRAIN_SYNONYM:
                split = 'train-standard'
            elif split in _EVAL_SYNONYM:
                split = 'val'
            ds = Places365(split=split, **torch_kwargs)
        elif name == 'qmnist':
            assert has_qmnist, 'Please update to a newer PyTorch and torchvision for QMNIST dataset.'
            use_train = split in _TRAIN_SYNONYM
            ds = QMNIST(train=use_train, **torch_kwargs)
        elif name == 'imagenet':
            assert has_imagenet, 'Please update to a newer PyTorch and torchvision for ImageNet dataset.'
            if split in _EVAL_SYNONYM:
                split = 'val'
            ds = ImageNet(split=split, **torch_kwargs)
        elif name == 'image_folder' or name == 'folder':
            # in case torchvision ImageFolder is preferred over timm ImageDataset for some reason
            if search_split and os.path.isdir(root):
                # look for split specific sub-folder in root
                root = _search_split(root, split)
            ds = ImageFolder(root, **kwargs)
        else:
            assert False, f"Unknown torchvision dataset {name}"
    elif name.startswith('hfds/'):
        # NOTE right now, HF datasets default arrow format is a random-access Dataset,
        # there will be an IterableDataset variant too, TBD
        ds = ImageDataset(
            root,
            reader=name,
            split=split,
            class_map=class_map,
            input_img_mode=input_img_mode,
            trust_remote_code=trust_remote_code,
            **kwargs,
        )
    elif name.startswith('hfids/'):
        ds = IterableImageDataset(
            root,
            reader=name,
            split=split,
            class_map=class_map,
            is_training=is_training,
            download=download,
            batch_size=batch_size,
            num_samples=num_samples,
            repeats=repeats,
            seed=seed,
            input_img_mode=input_img_mode,
            trust_remote_code=trust_remote_code,
            **kwargs,
        )
    elif name.startswith('tfds/'):
        ds = IterableImageDataset(
            root,
            reader=name,
            split=split,
            class_map=class_map,
            is_training=is_training,
            download=download,
            batch_size=batch_size,
            num_samples=num_samples,
            repeats=repeats,
            seed=seed,
            input_img_mode=input_img_mode,
            **kwargs
        )
    elif name.startswith('wds/'):
        ds = IterableImageDataset(
            root,
            reader=name,
            split=split,
            class_map=class_map,
            is_training=is_training,
            batch_size=batch_size,
            num_samples=num_samples,
            repeats=repeats,
            seed=seed,
            input_img_mode=input_img_mode,
            **kwargs
        )
    else:
        # FIXME support more advanced split cfg for ImageFolder/Tar datasets in the future
        if search_split and os.path.isdir(root):
            # look for split specific sub-folder in root
            root = _search_split(root, split)
        ds = ImageDataset(
            root,
            reader=name,
            class_map=class_map,
            load_bytes=load_bytes,
            input_img_mode=input_img_mode,
            **kwargs,
        )
    return ds
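Two usage sketches for create_dataset() (not part of the diff), one folder based and one torchvision based; paths are hypothetical:

from timm.data import create_dataset

# folder-based dataset, empty name; split sub-folder found via search_split
ds = create_dataset('', root='/data/imagenet', split='validation')

# torchvision CIFAR-10 via the 'torch/' prefix, downloaded on demand
ds = create_dataset('torch/cifar10', root='~/data', split='train', download=True)
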
pytorch-image-models/timm/data/dataset_info.py
ADDED
@@ -0,0 +1,73 @@
from abc import ABC, abstractmethod
from typing import Dict, List, Optional, Union


class DatasetInfo(ABC):

    def __init__(self):
        pass

    @abstractmethod
    def num_classes(self):
        pass

    @abstractmethod
    def label_names(self):
        pass

    @abstractmethod
    def label_descriptions(self, detailed: bool = False, as_dict: bool = False) -> Union[List[str], Dict[str, str]]:
        pass

    @abstractmethod
    def index_to_label_name(self, index) -> str:
        pass

    @abstractmethod
    def index_to_description(self, index: int, detailed: bool = False) -> str:
        pass

    @abstractmethod
    def label_name_to_description(self, label: str, detailed: bool = False) -> str:
        pass


class CustomDatasetInfo(DatasetInfo):
    """ DatasetInfo that wraps passed values for custom datasets."""

    def __init__(
            self,
            label_names: Union[List[str], Dict[int, str]],
            label_descriptions: Optional[Dict[str, str]] = None
    ):
        super().__init__()
        assert len(label_names) > 0
        self._label_names = label_names  # label index => label name mapping
        self._label_descriptions = label_descriptions  # label name => label description mapping
        if self._label_descriptions is not None:
            # validate descriptions (label names required)
            assert isinstance(self._label_descriptions, dict)
            for n in self._label_names:
                assert n in self._label_descriptions

    def num_classes(self):
        return len(self._label_names)

    def label_names(self):
        return self._label_names

    def label_descriptions(self, detailed: bool = False, as_dict: bool = False) -> Union[List[str], Dict[str, str]]:
        return self._label_descriptions

    def label_name_to_description(self, label: str, detailed: bool = False) -> str:
        if self._label_descriptions:
            return self._label_descriptions[label]
        return label  # return label name itself if a description is not present

    def index_to_label_name(self, index) -> str:
        assert 0 <= index < len(self._label_names)
        return self._label_names[index]

    def index_to_description(self, index: int, detailed: bool = False) -> str:
        label = self.index_to_label_name(index)
        return self.label_name_to_description(label, detailed=detailed)
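A small sketch of CustomDatasetInfo (not part of the diff), with made-up labels and descriptions:

from timm.data.dataset_info import CustomDatasetInfo

info = CustomDatasetInfo(
    label_names=['cat', 'dog'],
    label_descriptions={'cat': 'a small domesticated felid', 'dog': 'a domesticated canid'},
)
print(info.num_classes())            # 2
print(info.index_to_description(1))  # 'a domesticated canid'
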
pytorch-image-models/timm/data/distributed_sampler.py
ADDED
@@ -0,0 +1,135 @@
import math
import torch
from torch.utils.data import Sampler
import torch.distributed as dist


class OrderedDistributedSampler(Sampler):
    """Sampler that restricts data loading to a subset of the dataset.
    It is especially useful in conjunction with
    :class:`torch.nn.parallel.DistributedDataParallel`. In such case, each
    process can pass a DistributedSampler instance as a DataLoader sampler,
    and load a subset of the original dataset that is exclusive to it.
    .. note::
        Dataset is assumed to be of constant size.
    Arguments:
        dataset: Dataset used for sampling.
        num_replicas (optional): Number of processes participating in
            distributed training.
        rank (optional): Rank of the current process within num_replicas.
    """

    def __init__(self, dataset, num_replicas=None, rank=None):
        if num_replicas is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = dist.get_world_size()
        if rank is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = dist.get_rank()
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas

    def __iter__(self):
        indices = list(range(len(self.dataset)))

        # add extra samples to make it evenly divisible
        indices += indices[:(self.total_size - len(indices))]
        assert len(indices) == self.total_size

        # subsample
        indices = indices[self.rank:self.total_size:self.num_replicas]
        assert len(indices) == self.num_samples

        return iter(indices)

    def __len__(self):
        return self.num_samples


class RepeatAugSampler(Sampler):
    """Sampler that restricts data loading to a subset of the dataset for distributed,
    with repeated augmentation.
    It ensures that each augmented version of a sample will be visible to a
    different process (GPU). Heavily based on torch.utils.data.DistributedSampler

    This sampler was taken from https://github.com/facebookresearch/deit/blob/0c4b8f60/samplers.py
    Copyright (c) 2015-present, Facebook, Inc.
    """

    def __init__(
            self,
            dataset,
            num_replicas=None,
            rank=None,
            shuffle=True,
            num_repeats=3,
            selected_round=256,
            selected_ratio=0,
    ):
        if num_replicas is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = dist.get_world_size()
        if rank is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = dist.get_rank()
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.shuffle = shuffle
        self.num_repeats = num_repeats
        self.epoch = 0
        self.num_samples = int(math.ceil(len(self.dataset) * num_repeats / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        # Determine the number of samples to select per epoch for each rank.
        # num_selected logic defaults to be the same as original RASampler impl, but this one can be tweaked
        # via selected_ratio and selected_round args.
        selected_ratio = selected_ratio or num_replicas  # ratio to reduce selected samples by, num_replicas if 0
        if selected_round:
            self.num_selected_samples = int(math.floor(
                len(self.dataset) // selected_round * selected_round / selected_ratio))
        else:
            self.num_selected_samples = int(math.ceil(len(self.dataset) / selected_ratio))

    def __iter__(self):
        # deterministically shuffle based on epoch
        g = torch.Generator()
        g.manual_seed(self.epoch)
        if self.shuffle:
            indices = torch.randperm(len(self.dataset), generator=g)
        else:
            indices = torch.arange(start=0, end=len(self.dataset))

        # produce repeats e.g. [0, 0, 0, 1, 1, 1, 2, 2, 2....]
        if isinstance(self.num_repeats, float) and not self.num_repeats.is_integer():
            # resample for repeats w/ non-integer ratio
            repeat_size = math.ceil(self.num_repeats * len(self.dataset))
            indices = indices[torch.tensor([int(i // self.num_repeats) for i in range(repeat_size)])]
        else:
            indices = torch.repeat_interleave(indices, repeats=int(self.num_repeats), dim=0)
        indices = indices.tolist()  # leaving as tensor thrashes dataloader memory
        # add extra samples to make it evenly divisible
        padding_size = self.total_size - len(indices)
        if padding_size > 0:
            indices += indices[:padding_size]
        assert len(indices) == self.total_size

        # subsample per rank
        indices = indices[self.rank:self.total_size:self.num_replicas]
        assert len(indices) == self.num_samples

        # return up to num selected samples
        return iter(indices[:self.num_selected_samples])

    def __len__(self):
        return self.num_selected_samples

    def set_epoch(self, epoch):
        self.epoch = epoch
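A minimal sketch of RepeatAugSampler on a dummy dataset (not part of the diff); in real use num_replicas/rank come from the initialized process group rather than being passed explicitly:

import torch
from torch.utils.data import DataLoader, TensorDataset
from timm.data.distributed_sampler import RepeatAugSampler

dataset = TensorDataset(torch.randn(1024, 3, 224, 224), torch.randint(0, 10, (1024,)))
sampler = RepeatAugSampler(dataset, num_replicas=1, rank=0, num_repeats=3)
loader = DataLoader(dataset, batch_size=128, sampler=sampler)
for epoch in range(2):
    sampler.set_epoch(epoch)  # re-seed the deterministic shuffle each epoch
    for images, targets in loader:
        pass  # training step goes here
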
pytorch-image-models/timm/data/imagenet_info.py
ADDED
@@ -0,0 +1,95 @@
import csv
import os
import pkgutil
import re
from typing import Dict, List, Optional, Union

from .dataset_info import DatasetInfo


# NOTE no ambiguity wrt to mapping from # classes to ImageNet subset so far, but likely to change
_NUM_CLASSES_TO_SUBSET = {
    1000: 'imagenet-1k',
    11221: 'imagenet-21k-miil',  # miil subset of fall11
    11821: 'imagenet-12k',  # timm specific 12k subset of fall11
    21841: 'imagenet-22k',  # as in fall11.tar
    21842: 'imagenet-22k-ms',  # a Microsoft (for FocalNet) remapping of 22k that moves ImageNet-1k classes to the first 1000
    21843: 'imagenet-21k-goog',  # Google's ImageNet full has two classes not in fall11
}

_SUBSETS = {
    'imagenet1k': 'imagenet_synsets.txt',
    'imagenet12k': 'imagenet12k_synsets.txt',
    'imagenet22k': 'imagenet22k_synsets.txt',
    'imagenet21k': 'imagenet21k_goog_synsets.txt',
    'imagenet21kgoog': 'imagenet21k_goog_synsets.txt',
    'imagenet21kmiil': 'imagenet21k_miil_synsets.txt',
    'imagenet22kms': 'imagenet22k_ms_synsets.txt',
}
_LEMMA_FILE = 'imagenet_synset_to_lemma.txt'
_DEFINITION_FILE = 'imagenet_synset_to_definition.txt'


def infer_imagenet_subset(model_or_cfg) -> Optional[str]:
    if isinstance(model_or_cfg, dict):
        num_classes = model_or_cfg.get('num_classes', None)
    else:
        num_classes = getattr(model_or_cfg, 'num_classes', None)
    if not num_classes:
        pretrained_cfg = getattr(model_or_cfg, 'pretrained_cfg', {})
        # FIXME at some point pretrained_cfg should include dataset-tag,
        # which will be more robust than a guess based on num_classes
        num_classes = pretrained_cfg.get('num_classes', None)
    if not num_classes or num_classes not in _NUM_CLASSES_TO_SUBSET:
        return None
    return _NUM_CLASSES_TO_SUBSET[num_classes]


class ImageNetInfo(DatasetInfo):

    def __init__(self, subset: str = 'imagenet-1k'):
        super().__init__()
        subset = re.sub(r'[-_\s]', '', subset.lower())
        assert subset in _SUBSETS, f'Unknown imagenet subset {subset}.'

        # WordNet synsets (part-of-speech + offset) are the unique class label names for ImageNet classifiers
        synset_file = _SUBSETS[subset]
        synset_data = pkgutil.get_data(__name__, os.path.join('_info', synset_file))
        self._synsets = synset_data.decode('utf-8').splitlines()

        # WordNet lemmas (canonical dictionary form of word) and definitions are used to build
        # the class descriptions. If detailed=True both are used, otherwise just the lemmas.
        lemma_data = pkgutil.get_data(__name__, os.path.join('_info', _LEMMA_FILE))
        reader = csv.reader(lemma_data.decode('utf-8').splitlines(), delimiter='\t')
        self._lemmas = dict(reader)
        definition_data = pkgutil.get_data(__name__, os.path.join('_info', _DEFINITION_FILE))
        reader = csv.reader(definition_data.decode('utf-8').splitlines(), delimiter='\t')
        self._definitions = dict(reader)

    def num_classes(self):
        return len(self._synsets)

    def label_names(self):
        return self._synsets

    def label_descriptions(self, detailed: bool = False, as_dict: bool = False) -> Union[List[str], Dict[str, str]]:
        if as_dict:
            return {label: self.label_name_to_description(label, detailed=detailed) for label in self._synsets}
        else:
            return [self.label_name_to_description(label, detailed=detailed) for label in self._synsets]

    def index_to_label_name(self, index) -> str:
        assert 0 <= index < len(self._synsets), \
            f'Index ({index}) out of range for dataset with {len(self._synsets)} classes.'
        return self._synsets[index]

    def index_to_description(self, index: int, detailed: bool = False) -> str:
        label = self.index_to_label_name(index)
        return self.label_name_to_description(label, detailed=detailed)

    def label_name_to_description(self, label: str, detailed: bool = False) -> str:
        if detailed:
            description = f'{self._lemmas[label]}: {self._definitions[label]}'
        else:
            description = f'{self._lemmas[label]}'
        return description
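A usage sketch combining infer_imagenet_subset() and ImageNetInfo (not part of the diff; the index-0 lemma assumes the standard ImageNet-1k ordering):

from timm.data.imagenet_info import ImageNetInfo, infer_imagenet_subset

subset = infer_imagenet_subset({'num_classes': 1000})  # -> 'imagenet-1k'
info = ImageNetInfo(subset)
print(info.num_classes())            # 1000
print(info.index_to_description(0))  # lemma for the synset at index 0, e.g. 'tench, Tinca tinca'
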
pytorch-image-models/timm/data/loader.py
ADDED
@@ -0,0 +1,409 @@
""" Loader Factory, Fast Collate, CUDA Prefetcher

Prefetcher and Fast Collate inspired by NVIDIA APEX example at
https://github.com/NVIDIA/apex/commit/d5e2bb4bdeedd27b1dfaf5bb2b24d6c000dee9be#diff-cf86c282ff7fba81fad27a559379d5bf

Hacked together by / Copyright 2019, Ross Wightman
"""
import logging
import random
from contextlib import suppress
from functools import partial
from itertools import repeat
from typing import Callable, Optional, Tuple, Union

import torch
import torch.utils.data
import numpy as np

from .constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
from .dataset import IterableImageDataset, ImageDataset
from .distributed_sampler import OrderedDistributedSampler, RepeatAugSampler
from .random_erasing import RandomErasing
from .mixup import FastCollateMixup
from .transforms_factory import create_transform

_logger = logging.getLogger(__name__)


def fast_collate(batch):
    """ A fast collation function optimized for uint8 images (np array or torch) and int64 targets (labels)"""
    assert isinstance(batch[0], tuple)
    batch_size = len(batch)
    if isinstance(batch[0][0], tuple):
        # This branch 'deinterleaves' and flattens tuples of input tensors into one tensor ordered by position
        # such that all tuples of position n will end up in nth position after a torch.split(tensor, batch_size)
        inner_tuple_size = len(batch[0][0])
        flattened_batch_size = batch_size * inner_tuple_size
        targets = torch.zeros(flattened_batch_size, dtype=torch.int64)
        tensor = torch.zeros((flattened_batch_size, *batch[0][0][0].shape), dtype=torch.uint8)
        for i in range(batch_size):
            assert len(batch[i][0]) == inner_tuple_size  # all input tensor tuples must be same length
            for j in range(inner_tuple_size):
                targets[i + j * batch_size] = batch[i][1]
                tensor[i + j * batch_size] += torch.from_numpy(batch[i][0][j])
        return tensor, targets
    elif isinstance(batch[0][0], np.ndarray):
        targets = torch.tensor([b[1] for b in batch], dtype=torch.int64)
        assert len(targets) == batch_size
        tensor = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8)
        for i in range(batch_size):
            tensor[i] += torch.from_numpy(batch[i][0])
        return tensor, targets
    elif isinstance(batch[0][0], torch.Tensor):
        targets = torch.tensor([b[1] for b in batch], dtype=torch.int64)
        assert len(targets) == batch_size
        tensor = torch.zeros((batch_size, *batch[0][0].shape), dtype=torch.uint8)
        for i in range(batch_size):
            tensor[i].copy_(batch[i][0])
        return tensor, targets
    else:
        assert False

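A quick sketch (not part of the diff) showing what fast_collate() produces for a batch of uint8 numpy samples:

import numpy as np
from timm.data.loader import fast_collate

# two CHW uint8 numpy samples with integer labels
batch = [(np.zeros((3, 224, 224), dtype=np.uint8), 0),
         (np.ones((3, 224, 224), dtype=np.uint8), 1)]
images, targets = fast_collate(batch)
print(images.shape, images.dtype, targets)  # torch.Size([2, 3, 224, 224]) torch.uint8 tensor([0, 1])
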
def adapt_to_chs(x, n):
|
65 |
+
if not isinstance(x, (tuple, list)):
|
66 |
+
x = tuple(repeat(x, n))
|
67 |
+
elif len(x) != n:
|
68 |
+
x_mean = np.mean(x).item()
|
69 |
+
x = (x_mean,) * n
|
70 |
+
_logger.warning(f'Pretrained mean/std different shape than model, using avg value {x}.')
|
71 |
+
else:
|
72 |
+
assert len(x) == n, 'normalization stats must match image channels'
|
73 |
+
return x
|
74 |
+
|
75 |
+
|
76 |
+
class PrefetchLoader:
|
77 |
+
|
78 |
+
def __init__(
|
79 |
+
self,
|
80 |
+
loader,
|
81 |
+
mean=IMAGENET_DEFAULT_MEAN,
|
82 |
+
std=IMAGENET_DEFAULT_STD,
|
83 |
+
channels=3,
|
84 |
+
device=torch.device('cuda'),
|
85 |
+
img_dtype=torch.float32,
|
86 |
+
fp16=False,
|
87 |
+
re_prob=0.,
|
88 |
+
re_mode='const',
|
89 |
+
re_count=1,
|
90 |
+
re_num_splits=0):
|
91 |
+
|
92 |
+
mean = adapt_to_chs(mean, channels)
|
93 |
+
std = adapt_to_chs(std, channels)
|
94 |
+
normalization_shape = (1, channels, 1, 1)
|
95 |
+
|
96 |
+
self.loader = loader
|
97 |
+
self.device = device
|
98 |
+
if fp16:
|
99 |
+
# fp16 arg is deprecated, but will override dtype arg if set for bwd compat
|
100 |
+
img_dtype = torch.float16
|
101 |
+
self.img_dtype = img_dtype
|
102 |
+
self.mean = torch.tensor(
|
103 |
+
[x * 255 for x in mean], device=device, dtype=img_dtype).view(normalization_shape)
|
104 |
+
self.std = torch.tensor(
|
105 |
+
[x * 255 for x in std], device=device, dtype=img_dtype).view(normalization_shape)
|
106 |
+
if re_prob > 0.:
|
107 |
+
self.random_erasing = RandomErasing(
|
108 |
+
probability=re_prob,
|
109 |
+
mode=re_mode,
|
110 |
+
max_count=re_count,
|
111 |
+
num_splits=re_num_splits,
|
112 |
+
device=device,
|
113 |
+
)
|
114 |
+
else:
|
115 |
+
self.random_erasing = None
|
116 |
+
self.is_cuda = device.type == 'cuda' and torch.cuda.is_available()
|
117 |
+
self.is_npu = device.type == 'npu' and torch.npu.is_available()
|
118 |
+
|
119 |
+
    def __iter__(self):
        first = True
        if self.is_cuda:
            stream = torch.cuda.Stream()
            stream_context = partial(torch.cuda.stream, stream=stream)
        elif self.is_npu:
            stream = torch.npu.Stream()
            stream_context = partial(torch.npu.stream, stream=stream)
        else:
            stream = None
            stream_context = suppress

        for next_input, next_target in self.loader:

            with stream_context():
                next_input = next_input.to(device=self.device, non_blocking=True)
                next_target = next_target.to(device=self.device, non_blocking=True)
                next_input = next_input.to(self.img_dtype).sub_(self.mean).div_(self.std)
                if self.random_erasing is not None:
                    next_input = self.random_erasing(next_input)

            # one-step lookahead: yield the previous batch while the next one is
            # normalized/transferred on the side stream
            if not first:
                yield input, target
            else:
                first = False

            if stream is not None:
                if self.is_cuda:
                    torch.cuda.current_stream().wait_stream(stream)
                elif self.is_npu:
                    torch.npu.current_stream().wait_stream(stream)

            input = next_input
            target = next_target

        yield input, target

    def __len__(self):
        return len(self.loader)

    @property
    def sampler(self):
        return self.loader.sampler

    @property
    def dataset(self):
        return self.loader.dataset

    @property
    def mixup_enabled(self):
        if isinstance(self.loader.collate_fn, FastCollateMixup):
            return self.loader.collate_fn.mixup_enabled
        else:
            return False

    @mixup_enabled.setter
    def mixup_enabled(self, x):
        if isinstance(self.loader.collate_fn, FastCollateMixup):
            self.loader.collate_fn.mixup_enabled = x


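# Illustrative sketch, not part of the original file: wiring a plain DataLoader
# through PrefetchLoader by hand. create_loader() below does this automatically;
# the dataset here is hypothetical and must yield (uint8 ndarray, int) samples
# for fast_collate.
def _demo_prefetch_loader(dataset):
    base = torch.utils.data.DataLoader(dataset, batch_size=32, collate_fn=fast_collate)
    prefetched = PrefetchLoader(base, device=torch.device('cuda'))
    for inputs, targets in prefetched:
        # inputs arrive on the GPU, already float and normalized
        pass

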
def _worker_init(worker_id, worker_seeding='all'):
    worker_info = torch.utils.data.get_worker_info()
    assert worker_info.id == worker_id
    if isinstance(worker_seeding, Callable):
        seed = worker_seeding(worker_info)
        random.seed(seed)
        torch.manual_seed(seed)
        np.random.seed(seed % (2 ** 32 - 1))
    else:
        assert worker_seeding in ('all', 'part')
        # random / torch seed already called in dataloader iter class w/ worker_info.seed
        # to reproduce some old results (same seed + hparam combo), partial seeding is required (skip numpy re-seed)
        if worker_seeding == 'all':
            np.random.seed(worker_info.seed % (2 ** 32 - 1))


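# Illustrative sketch, not part of the original file: a hypothetical callable
# that could be passed as worker_seeding (via create_loader below) instead of
# 'all'/'part'; _worker_init will seed random/torch/numpy with its return value.
def _demo_worker_seeding(worker_info):
    # derive a deterministic per-worker seed from the base seed and worker id
    return worker_info.seed + worker_info.id

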
def create_loader(
        dataset: Union[ImageDataset, IterableImageDataset],
        input_size: Union[int, Tuple[int, int], Tuple[int, int, int]],
        batch_size: int,
        is_training: bool = False,
        no_aug: bool = False,
        re_prob: float = 0.,
        re_mode: str = 'const',
        re_count: int = 1,
        re_split: bool = False,
        train_crop_mode: Optional[str] = None,
        scale: Optional[Tuple[float, float]] = None,
        ratio: Optional[Tuple[float, float]] = None,
        hflip: float = 0.5,
        vflip: float = 0.,
        color_jitter: float = 0.4,
        color_jitter_prob: Optional[float] = None,
        grayscale_prob: float = 0.,
        gaussian_blur_prob: float = 0.,
        auto_augment: Optional[str] = None,
        num_aug_repeats: int = 0,
        num_aug_splits: int = 0,
        interpolation: str = 'bilinear',
        mean: Tuple[float, ...] = IMAGENET_DEFAULT_MEAN,
        std: Tuple[float, ...] = IMAGENET_DEFAULT_STD,
        num_workers: int = 1,
        distributed: bool = False,
        crop_pct: Optional[float] = None,
        crop_mode: Optional[str] = None,
        crop_border_pixels: Optional[int] = None,
        collate_fn: Optional[Callable] = None,
        pin_memory: bool = False,
        fp16: bool = False,  # deprecated, use img_dtype
        img_dtype: torch.dtype = torch.float32,
        device: torch.device = torch.device('cuda'),
        use_prefetcher: bool = True,
        use_multi_epochs_loader: bool = False,
        persistent_workers: bool = True,
        worker_seeding: str = 'all',
        tf_preprocessing: bool = False,
):
    """Create the dataset transform and a DataLoader, optionally wrapped in a device prefetcher.

    Args:
        dataset: The image dataset to load.
        input_size: Target input size (channels, height, width) tuple or size scalar.
        batch_size: Number of samples in a batch.
        is_training: Return training (random) transforms.
        no_aug: Disable augmentation for training (useful for debugging).
        re_prob: Random erasing probability.
        re_mode: Random erasing fill mode.
        re_count: Number of random erasing regions.
        re_split: Control split of random erasing across batch size.
        train_crop_mode: Training crop mode override (RRC vs RKR style cropping, see scale/ratio notes).
        scale: Random resize scale range (crop area, < 1.0 => zoom in).
        ratio: Random aspect ratio range (crop ratio for RRC, ratio adjustment factor for RKR).
        hflip: Horizontal flip probability.
        vflip: Vertical flip probability.
        color_jitter: Random color jitter component factors (brightness, contrast, saturation, hue).
            Scalar is applied as (scalar,) * 3 (no hue).
        color_jitter_prob: Apply color jitter with this probability if not None (for SimCLR-like aug).
        grayscale_prob: Probability of converting image to grayscale (for SimCLR-like aug).
        gaussian_blur_prob: Probability of applying gaussian blur (for SimCLR-like aug).
        auto_augment: Auto augment configuration string (see auto_augment.py).
        num_aug_repeats: Enable special sampler to repeat same augmentation across distributed GPUs.
        num_aug_splits: Enable mode where augmentations can be split across the batch.
        interpolation: Image interpolation mode.
        mean: Image normalization mean.
        std: Image normalization standard deviation.
        num_workers: Num worker processes per DataLoader.
        distributed: Enable dataloading for distributed training.
        crop_pct: Inference crop percentage (output size / resize size).
        crop_mode: Inference crop mode. One of ['squash', 'border', 'center']. Defaults to 'center' when None.
        crop_border_pixels: Inference crop border of specified # pixels around edge of original image.
        collate_fn: Override default collate_fn.
        pin_memory: Pin memory for device transfer.
        fp16: Deprecated argument for half-precision input dtype. Use img_dtype.
        img_dtype: Data type for input image.
        device: Device to transfer inputs and targets to.
        use_prefetcher: Use efficient pre-fetcher to load samples onto device.
        use_multi_epochs_loader: Use a loader variant that keeps worker processes alive across epochs.
        persistent_workers: Enable persistent worker processes.
        worker_seeding: Control worker random seeding at init.
        tf_preprocessing: Use TF 1.0 inference preprocessing for testing model ports.

    Returns:
        DataLoader
    """
    re_num_splits = 0
    if re_split:
        # apply RE to second half of batch if no aug split otherwise line up with aug split
        re_num_splits = num_aug_splits or 2
    dataset.transform = create_transform(
        input_size,
        is_training=is_training,
        no_aug=no_aug,
        train_crop_mode=train_crop_mode,
        scale=scale,
        ratio=ratio,
        hflip=hflip,
        vflip=vflip,
        color_jitter=color_jitter,
        color_jitter_prob=color_jitter_prob,
        grayscale_prob=grayscale_prob,
        gaussian_blur_prob=gaussian_blur_prob,
        auto_augment=auto_augment,
        interpolation=interpolation,
        mean=mean,
        std=std,
        crop_pct=crop_pct,
        crop_mode=crop_mode,
        crop_border_pixels=crop_border_pixels,
        re_prob=re_prob,
        re_mode=re_mode,
        re_count=re_count,
        re_num_splits=re_num_splits,
        tf_preprocessing=tf_preprocessing,
        use_prefetcher=use_prefetcher,
        separate=num_aug_splits > 0,
    )

    if isinstance(dataset, IterableImageDataset):
        # give Iterable datasets early knowledge of num_workers so that sample estimates
        # are correct before worker processes are launched
        dataset.set_loader_cfg(num_workers=num_workers)

    sampler = None
    if distributed and not isinstance(dataset, torch.utils.data.IterableDataset):
        if is_training:
            if num_aug_repeats:
                sampler = RepeatAugSampler(dataset, num_repeats=num_aug_repeats)
            else:
                sampler = torch.utils.data.distributed.DistributedSampler(dataset)
        else:
            # This will add extra duplicate entries to result in equal num
            # of samples per-process, will slightly alter validation results
            sampler = OrderedDistributedSampler(dataset)
    else:
        assert num_aug_repeats == 0, "RepeatAugment not currently supported in non-distributed or IterableDataset use"

    if collate_fn is None:
        collate_fn = fast_collate if use_prefetcher else torch.utils.data.dataloader.default_collate

    loader_class = torch.utils.data.DataLoader
    if use_multi_epochs_loader:
        loader_class = MultiEpochsDataLoader

    loader_args = dict(
        batch_size=batch_size,
        shuffle=not isinstance(dataset, torch.utils.data.IterableDataset) and sampler is None and is_training,
        num_workers=num_workers,
        sampler=sampler,
        collate_fn=collate_fn,
        pin_memory=pin_memory,
        drop_last=is_training,
        worker_init_fn=partial(_worker_init, worker_seeding=worker_seeding),
        persistent_workers=persistent_workers,
    )
    try:
        loader = loader_class(dataset, **loader_args)
    except TypeError:
        loader_args.pop('persistent_workers')  # only in PyTorch 1.7+
        loader = loader_class(dataset, **loader_args)
    if use_prefetcher:
        prefetch_re_prob = re_prob if is_training and not no_aug else 0.
        loader = PrefetchLoader(
            loader,
            mean=mean,
            std=std,
            channels=input_size[0],
            device=device,
            fp16=fp16,  # deprecated, use img_dtype
            img_dtype=img_dtype,
            re_prob=prefetch_re_prob,
            re_mode=re_mode,
            re_count=re_count,
            re_num_splits=re_num_splits,
        )

    return loader


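# Illustrative sketch, not part of the original file: a typical training-time
# call to create_loader. The dataset and hyperparameter values are assumptions.
def _demo_create_loader(train_dataset):
    return create_loader(
        train_dataset,
        input_size=(3, 224, 224),
        batch_size=64,
        is_training=True,
        re_prob=0.25,                     # random erasing on 25% of samples
        auto_augment='rand-m9-mstd0.5',   # RandAugment policy string
        num_workers=4,
        use_prefetcher=True,              # normalize + transfer on a side stream
    )

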
class MultiEpochsDataLoader(torch.utils.data.DataLoader):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._DataLoader__initialized = False
        if self.batch_sampler is None:
            self.sampler = _RepeatSampler(self.sampler)
        else:
            self.batch_sampler = _RepeatSampler(self.batch_sampler)
        self._DataLoader__initialized = True
        self.iterator = super().__iter__()

    def __len__(self):
        return len(self.sampler) if self.batch_sampler is None else len(self.batch_sampler.sampler)

    def __iter__(self):
        for _ in range(len(self)):
            yield next(self.iterator)


class _RepeatSampler(object):
    """ Sampler that repeats forever.

    Args:
        sampler (Sampler)
    """

    def __init__(self, sampler):
        self.sampler = sampler

    def __iter__(self):
        while True:
            yield from iter(self.sampler)
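

# Illustrative sketch, not part of the original file: _RepeatSampler lets
# MultiEpochsDataLoader keep one worker pool alive, so iterating several epochs
# avoids per-epoch worker startup cost. The dataset here is hypothetical.
def _demo_multi_epochs(dataset):
    loader = MultiEpochsDataLoader(dataset, batch_size=16, num_workers=2)
    for _epoch in range(3):
        for inputs, targets in loader:  # workers persist across these epochs
            pass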